1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
62
63
64 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
65
66 struct target_builtins default_target_builtins;
67 #if SWITCHABLE_TARGET
68 struct target_builtins *this_target_builtins = &default_target_builtins;
69 #endif
70
71 /* Define the names of the builtin function types and codes. */
72 const char *const built_in_class_names[BUILT_IN_LAST]
73 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
74
75 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
76 const char * built_in_names[(int) END_BUILTINS] =
77 {
78 #include "builtins.def"
79 };
80 #undef DEF_BUILTIN
81
82 /* Set up an array of _DECL trees, making sure each element is
83 initialized to NULL_TREE. */
84 builtin_info_type builtin_info;
85
86 /* Non-zero if __builtin_constant_p should be folded right away. */
87 bool force_folding_builtin_constant_p;
88
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
96 #endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
110 static rtx expand_builtin_interclass_mathfn (tree, rtx);
111 static rtx expand_builtin_sincos (tree);
112 static rtx expand_builtin_cexpi (tree, rtx);
113 static rtx expand_builtin_int_roundingfn (tree, rtx);
114 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static rtx expand_builtin_fabs (tree, rtx, rtx);
151 static rtx expand_builtin_signbit (tree, rtx);
152 static tree fold_builtin_sqrt (location_t, tree, tree);
153 static tree fold_builtin_cbrt (location_t, tree, tree);
154 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_cos (location_t, tree, tree, tree);
157 static tree fold_builtin_cosh (location_t, tree, tree, tree);
158 static tree fold_builtin_tan (tree, tree);
159 static tree fold_builtin_trunc (location_t, tree, tree);
160 static tree fold_builtin_floor (location_t, tree, tree);
161 static tree fold_builtin_ceil (location_t, tree, tree);
162 static tree fold_builtin_round (location_t, tree, tree);
163 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
164 static tree fold_builtin_bitop (tree, tree);
165 static tree fold_builtin_strchr (location_t, tree, tree, tree);
166 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_strcmp (location_t, tree, tree);
169 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
170 static tree fold_builtin_signbit (location_t, tree, tree);
171 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_isascii (location_t, tree);
173 static tree fold_builtin_toascii (location_t, tree);
174 static tree fold_builtin_isdigit (location_t, tree);
175 static tree fold_builtin_fabs (location_t, tree, tree);
176 static tree fold_builtin_abs (location_t, tree, tree);
177 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 enum tree_code);
179 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
180 static tree fold_builtin_0 (location_t, tree, bool);
181 static tree fold_builtin_1 (location_t, tree, tree, bool);
182 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
183 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
184 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
185 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186
187 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
188 static tree fold_builtin_strstr (location_t, tree, tree, tree);
189 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193
194 static rtx expand_builtin_object_size (tree);
195 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
196 enum built_in_function);
197 static void maybe_emit_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_free_warning (tree);
200 static tree fold_builtin_object_size (tree, tree);
201 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
202 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
203 enum built_in_function);
204
205 static unsigned HOST_WIDE_INT target_newline;
206 unsigned HOST_WIDE_INT target_percent;
207 static unsigned HOST_WIDE_INT target_c;
208 static unsigned HOST_WIDE_INT target_s;
209 static char target_percent_c[3];
210 char target_percent_s[3];
211 static char target_percent_s_newline[4];
212 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
213 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
214 static tree do_mpfr_arg2 (tree, tree, tree,
215 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
216 static tree do_mpfr_arg3 (tree, tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_sincos (tree, tree, tree);
219 static tree do_mpfr_bessel_n (tree, tree, tree,
220 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_remquo (tree, tree, tree);
223 static tree do_mpfr_lgamma_r (tree, tree, tree);
224 static void expand_builtin_sync_synchronize (void);
225
226 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or is a Cilk runtime helper when -fcilkplus. */
227
228 static bool
229 is_builtin_name (const char *name)
230 {
231 if (strncmp (name, "__builtin_", 10) == 0)
232 return true;
233 if (strncmp (name, "__sync_", 7) == 0)
234 return true;
235 if (strncmp (name, "__atomic_", 9) == 0)
236 return true;
237 if (flag_cilkplus
238 && (!strcmp (name, "__cilkrts_detach")
239 || !strcmp (name, "__cilkrts_pop_frame")))
240 return true;
241 return false;
242 }
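
/* Added commentary (not part of the original source): given the checks
   above, is_builtin_name classifies names as follows:

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false
     is_builtin_name ("__cilkrts_detach")      -> true only under -fcilkplus  */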
243
244
245 /* Return true if DECL is a function symbol representing a built-in. */
246
247 bool
248 is_builtin_fn (tree decl)
249 {
250 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
251 }
252
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
256
257 static bool
258 called_as_built_in (tree node)
259 {
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
265 }
266
267 /* Compute values M and N such that M divides (address of EXP - N) and such
268 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
269 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
270 *ALIGNP and any bit-offset to *BITPOSP.
271
272 Note that the address (and thus the alignment) computed here is based
273 on the address to which a symbol resolves, whereas DECL_ALIGN is based
274 on the address at which an object is actually located. These two
275 addresses are not always the same. For example, on ARM targets,
276 the address &foo of a Thumb function foo() has the lowest bit set,
277 whereas foo() itself starts on an even address.
278
279 If ADDR_P is true we are taking the address of the memory reference EXP
280 and thus cannot rely on the access taking place. */
281
282 static bool
283 get_object_alignment_2 (tree exp, unsigned int *alignp,
284 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
285 {
286 HOST_WIDE_INT bitsize, bitpos;
287 tree offset;
288 enum machine_mode mode;
289 int unsignedp, volatilep;
290 unsigned int align = BITS_PER_UNIT;
291 bool known_alignment = false;
292
293 /* Get the innermost object and the constant (bitpos) and possibly
294 variable (offset) offset of the access. */
295 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
296 &mode, &unsignedp, &volatilep, true);
297
298 /* Extract alignment information from the innermost object and
299 possibly adjust bitpos and offset. */
300 if (TREE_CODE (exp) == FUNCTION_DECL)
301 {
302 /* Function addresses can encode extra information besides their
303 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
304 allows the low bit to be used as a virtual bit, we know
305 that the address itself must be at least 2-byte aligned. */
306 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
307 align = 2 * BITS_PER_UNIT;
308 }
309 else if (TREE_CODE (exp) == LABEL_DECL)
310 ;
311 else if (TREE_CODE (exp) == CONST_DECL)
312 {
313 /* The alignment of a CONST_DECL is determined by its initializer. */
314 exp = DECL_INITIAL (exp);
315 align = TYPE_ALIGN (TREE_TYPE (exp));
316 #ifdef CONSTANT_ALIGNMENT
317 if (CONSTANT_CLASS_P (exp))
318 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
319 #endif
320 known_alignment = true;
321 }
322 else if (DECL_P (exp))
323 {
324 align = DECL_ALIGN (exp);
325 known_alignment = true;
326 }
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 {
329 align = TYPE_ALIGN (TREE_TYPE (exp));
330 }
331 else if (TREE_CODE (exp) == INDIRECT_REF
332 || TREE_CODE (exp) == MEM_REF
333 || TREE_CODE (exp) == TARGET_MEM_REF)
334 {
335 tree addr = TREE_OPERAND (exp, 0);
336 unsigned ptr_align;
337 unsigned HOST_WIDE_INT ptr_bitpos;
338
339 if (TREE_CODE (addr) == BIT_AND_EXPR
340 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
341 {
342 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
343 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
344 align *= BITS_PER_UNIT;
345 addr = TREE_OPERAND (addr, 0);
346 }
347
348 known_alignment
349 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
350 align = MAX (ptr_align, align);
351
352 /* The alignment of the pointer operand in a TARGET_MEM_REF
353 has to take the variable offset parts into account. */
354 if (TREE_CODE (exp) == TARGET_MEM_REF)
355 {
356 if (TMR_INDEX (exp))
357 {
358 unsigned HOST_WIDE_INT step = 1;
359 if (TMR_STEP (exp))
360 step = TREE_INT_CST_LOW (TMR_STEP (exp));
361 align = MIN (align, (step & -step) * BITS_PER_UNIT);
362 }
363 if (TMR_INDEX2 (exp))
364 align = BITS_PER_UNIT;
365 known_alignment = false;
366 }
367
368 /* When EXP is an actual memory reference then we can use
369 TYPE_ALIGN of a pointer indirection to derive alignment.
370 Do so only if get_pointer_alignment_1 did not reveal absolute
371 alignment knowledge and if using that alignment would
372 improve the situation. */
373 if (!addr_p && !known_alignment
374 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
375 align = TYPE_ALIGN (TREE_TYPE (exp));
376 else
377 {
378 /* Else adjust bitpos accordingly. */
379 bitpos += ptr_bitpos;
380 if (TREE_CODE (exp) == MEM_REF
381 || TREE_CODE (exp) == TARGET_MEM_REF)
382 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
383 }
384 }
385 else if (TREE_CODE (exp) == STRING_CST)
386 {
387 /* STRING_CST are the only constant objects we allow to be not
388 wrapped inside a CONST_DECL. */
389 align = TYPE_ALIGN (TREE_TYPE (exp));
390 #ifdef CONSTANT_ALIGNMENT
391 if (CONSTANT_CLASS_P (exp))
392 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
393 #endif
394 known_alignment = true;
395 }
396
397 /* If there is a non-constant offset part extract the maximum
398 alignment that can prevail. */
399 if (offset)
400 {
401 unsigned int trailing_zeros = tree_ctz (offset);
402 if (trailing_zeros < HOST_BITS_PER_INT)
403 {
404 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
405 if (inner)
406 align = MIN (align, inner);
407 }
408 }
409
410 *alignp = align;
411 *bitposp = bitpos & (*alignp - 1);
412 return known_alignment;
413 }
414
415 /* For a memory reference expression EXP compute values M and N such that M
416 divides (&EXP - N) and such that N < M. If these numbers can be determined,
417 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
418 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
419
420 bool
421 get_object_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
423 {
424 return get_object_alignment_2 (exp, alignp, bitposp, false);
425 }
426
427 /* Return the alignment in bits of EXP, an object. */
428
429 unsigned int
430 get_object_alignment (tree exp)
431 {
432 unsigned HOST_WIDE_INT bitpos = 0;
433 unsigned int align;
434
435 get_object_alignment_1 (exp, &align, &bitpos);
436
437 /* align and bitpos now specify known low bits of the pointer.
438 ptr & (align - 1) == bitpos. */
439
440 if (bitpos != 0)
441 align = (bitpos & -bitpos);
442 return align;
443 }
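
/* Added commentary (not part of the original source): a worked example of
   the invariant above.  Suppose get_object_alignment_1 reports align == 64
   and bitpos == 16, i.e. the address is known to sit 2 bytes past an
   8-byte boundary.  The largest power of two certain to divide the
   address is then

     bitpos & -bitpos == 16 & -16 == 16

   so get_object_alignment returns 16 bits, i.e. 2-byte alignment.  */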
444
445 /* For a pointer valued expression EXP compute values M and N such that M
446 divides (EXP - N) and such that N < M. If these numbers can be determined,
447 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
448 the results are just a conservative approximation.
449
450 If EXP is not a pointer, false is returned too. */
451
452 bool
453 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
454 unsigned HOST_WIDE_INT *bitposp)
455 {
456 STRIP_NOPS (exp);
457
458 if (TREE_CODE (exp) == ADDR_EXPR)
459 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
460 alignp, bitposp, true);
461 else if (TREE_CODE (exp) == SSA_NAME
462 && POINTER_TYPE_P (TREE_TYPE (exp)))
463 {
464 unsigned int ptr_align, ptr_misalign;
465 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
466
467 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
468 {
469 *bitposp = ptr_misalign * BITS_PER_UNIT;
470 *alignp = ptr_align * BITS_PER_UNIT;
471 /* We cannot really tell whether this result is an approximation. */
472 return true;
473 }
474 else
475 {
476 *bitposp = 0;
477 *alignp = BITS_PER_UNIT;
478 return false;
479 }
480 }
481 else if (TREE_CODE (exp) == INTEGER_CST)
482 {
483 *alignp = BIGGEST_ALIGNMENT;
484 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
485 & (BIGGEST_ALIGNMENT - 1));
486 return true;
487 }
488
489 *bitposp = 0;
490 *alignp = BITS_PER_UNIT;
491 return false;
492 }
493
494 /* Return the alignment in bits of EXP, a pointer valued expression.
495 The alignment returned is, by default, the alignment of the thing that
496 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
497
498 Otherwise, look at the expression to see if we can do better, i.e., if the
499 expression is actually pointing at an object whose alignment is tighter. */
500
501 unsigned int
502 get_pointer_alignment (tree exp)
503 {
504 unsigned HOST_WIDE_INT bitpos = 0;
505 unsigned int align;
506
507 get_pointer_alignment_1 (exp, &align, &bitpos);
508
509 /* align and bitpos now specify known low bits of the pointer.
510 ptr & (align - 1) == bitpos. */
511
512 if (bitpos != 0)
513 align = (bitpos & -bitpos);
514
515 return align;
516 }
517
518 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
519 way, because it could contain a zero byte in the middle.
520 TREE_STRING_LENGTH is the size of the character array, not the string.
521
522 ONLY_VALUE should be nonzero if the result is not going to be emitted
523 into the instruction stream and zero if it is going to be expanded.
524 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
525 is returned, otherwise NULL, since
526 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
527 evaluate the side-effects.
528
529 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
530 accesses. Note that this implies the result is not going to be emitted
531 into the instruction stream.
532
533 The value returned is of type `ssizetype'.
534
535 Unfortunately, string_constant can't access the values of const char
536 arrays with initializers, so neither can we here. */
537
538 tree
539 c_strlen (tree src, int only_value)
540 {
541 tree offset_node;
542 HOST_WIDE_INT offset;
543 int max;
544 const char *ptr;
545 location_t loc;
546
547 STRIP_NOPS (src);
548 if (TREE_CODE (src) == COND_EXPR
549 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
550 {
551 tree len1, len2;
552
553 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
554 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
555 if (tree_int_cst_equal (len1, len2))
556 return len1;
557 }
558
559 if (TREE_CODE (src) == COMPOUND_EXPR
560 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
561 return c_strlen (TREE_OPERAND (src, 1), only_value);
562
563 loc = EXPR_LOC_OR_LOC (src, input_location);
564
565 src = string_constant (src, &offset_node);
566 if (src == 0)
567 return NULL_TREE;
568
569 max = TREE_STRING_LENGTH (src) - 1;
570 ptr = TREE_STRING_POINTER (src);
571
572 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
573 {
574 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
575 compute the offset to the following null if we don't know where to
576 start searching for it. */
577 int i;
578
579 for (i = 0; i < max; i++)
580 if (ptr[i] == 0)
581 return NULL_TREE;
582
583 /* We don't know the starting offset, but we do know that the string
584 has no internal zero bytes. We can assume that the offset falls
585 within the bounds of the string; otherwise, the programmer deserves
586 what he gets. Subtract the offset from the length of the string,
587 and return that. This would perhaps not be valid if we were dealing
588 with named arrays in addition to literal string constants. */
589
590 return size_diffop_loc (loc, size_int (max), offset_node);
591 }
592
593 /* We have a known offset into the string. Start searching there for
594 a null character if we can represent it as a single HOST_WIDE_INT. */
595 if (offset_node == 0)
596 offset = 0;
597 else if (! tree_fits_shwi_p (offset_node))
598 offset = -1;
599 else
600 offset = tree_to_shwi (offset_node);
601
602 /* If the offset is known to be out of bounds, warn, and call strlen at
603 runtime. */
604 if (offset < 0 || offset > max)
605 {
606 /* Suppress multiple warnings for propagated constant strings. */
607 if (only_value != 2
608 && !TREE_NO_WARNING (src))
609 {
610 warning_at (loc, 0, "offset outside bounds of constant string");
611 TREE_NO_WARNING (src) = 1;
612 }
613 return NULL_TREE;
614 }
615
616 /* Use strlen to search for the first zero byte. Since any strings
617 constructed with build_string will have nulls appended, we win even
618 if we get handed something like (char[4])"abcd".
619
620 Since OFFSET is our starting index into the string, no further
621 calculation is needed. */
622 return ssize_int (strlen (ptr + offset));
623 }
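
/* Added commentary (not part of the original source): for a SRC that
   resolves to the constant "foo\0bar" (TREE_STRING_LENGTH == 8, so
   max == 7), the cases above give:

     offset 0            -> ssize_int (3), strlen from the start
     offset 4            -> ssize_int (3), strlen of "bar"
     offset 9            -> NULL_TREE, out of bounds (warning emitted)
     non-constant offset -> NULL_TREE, the internal zero byte makes the
                            length unknowable  */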
624
625 /* Return a char pointer for a C string if it is a string constant
626 or sum of string constant and integer constant. */
627
628 const char *
629 c_getstr (tree src)
630 {
631 tree offset_node;
632
633 src = string_constant (src, &offset_node);
634 if (src == 0)
635 return 0;
636
637 if (offset_node == 0)
638 return TREE_STRING_POINTER (src);
639 else if (!tree_fits_uhwi_p (offset_node)
640 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
641 return 0;
642
643 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
644 }
645
646 /* Return a constant integer corresponding to target reading
647 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
648
649 static rtx
650 c_readstr (const char *str, enum machine_mode mode)
651 {
652 HOST_WIDE_INT ch;
653 unsigned int i, j;
654 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
655
656 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
657 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
658 / HOST_BITS_PER_WIDE_INT;
659
660 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
661 for (i = 0; i < len; i++)
662 tmp[i] = 0;
663
664 ch = 1;
665 for (i = 0; i < GET_MODE_SIZE (mode); i++)
666 {
667 j = i;
668 if (WORDS_BIG_ENDIAN)
669 j = GET_MODE_SIZE (mode) - i - 1;
670 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
671 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
672 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
673 j *= BITS_PER_UNIT;
674
675 if (ch)
676 ch = (unsigned char) str[i];
677 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
678 }
679
680 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
681 return immed_wide_int_const (c, mode);
682 }
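
/* Added commentary (not part of the original source): on a little-endian
   target with 8-bit units, c_readstr ("abcd", SImode) packs the lowest
   addressed byte into the least significant bits, giving 0x64636261
   ('a' == 0x61 in bits 0-7, 'd' == 0x64 in bits 24-31); a big-endian
   target yields 0x61626364 instead.  Once the string's terminating NUL
   has been read, CH stays zero and the remaining bytes are zero-filled. */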
683
684 /* Cast a target constant CST to target CHAR; if that value fits into the
685 host char type, return zero and store the value in the variable pointed
686 to by P. Otherwise return nonzero. */
687
688 static int
689 target_char_cast (tree cst, char *p)
690 {
691 unsigned HOST_WIDE_INT val, hostval;
692
693 if (TREE_CODE (cst) != INTEGER_CST
694 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
695 return 1;
696
697 /* Do not care if it fits or not right here. */
698 val = TREE_INT_CST_LOW (cst);
699
700 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
701 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
702
703 hostval = val;
704 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
705 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
706
707 if (val != hostval)
708 return 1;
709
710 *p = hostval;
711 return 0;
712 }
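
/* Added commentary (not part of the original source): with 8-bit target
   and host chars, calling target_char_cast on the constant 0x141 masks
   VAL down to 0x41 ('A'); HOSTVAL is also 0x41, so *P is set to 0x41 and
   0 is returned.  With a hypothetical 16-bit target char, VAL == 0x141
   would not survive the host-char mask (0x41 != 0x141), so 1 would be
   returned instead.  */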
713
714 /* Similar to save_expr, but assumes that arbitrary code is not executed
715 in between the multiple evaluations. In particular, we assume that a
716 non-addressable local variable will not be modified. */
717
718 static tree
719 builtin_save_expr (tree exp)
720 {
721 if (TREE_CODE (exp) == SSA_NAME
722 || (TREE_ADDRESSABLE (exp) == 0
723 && (TREE_CODE (exp) == PARM_DECL
724 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
725 return exp;
726
727 return save_expr (exp);
728 }
729
730 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
731 times to get the address of either a higher stack frame, or a return
732 address located within it (depending on FNDECL_CODE). */
733
734 static rtx
735 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
736 {
737 int i;
738
739 #ifdef INITIAL_FRAME_ADDRESS_RTX
740 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
741 #else
742 rtx tem;
743
744 /* For a zero count with __builtin_return_address, we don't care what
745 frame address we return, because target-specific definitions will
746 override us. Therefore frame pointer elimination is OK, and using
747 the soft frame pointer is OK.
748
749 For a nonzero count, or a zero count with __builtin_frame_address,
750 we require a stable offset from the current frame pointer to the
751 previous one, so we must use the hard frame pointer, and
752 we must disable frame pointer elimination. */
753 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
754 tem = frame_pointer_rtx;
755 else
756 {
757 tem = hard_frame_pointer_rtx;
758
759 /* Tell reload not to eliminate the frame pointer. */
760 crtl->accesses_prior_frames = 1;
761 }
762 #endif
763
764 /* Some machines need special handling before we can access
765 arbitrary frames. For example, on the SPARC, we must first flush
766 all register windows to the stack. */
767 #ifdef SETUP_FRAME_ADDRESSES
768 if (count > 0)
769 SETUP_FRAME_ADDRESSES ();
770 #endif
771
772 /* On the SPARC, the return address is not in the frame, it is in a
773 register. There is no way to access it off of the current frame
774 pointer, but it can be accessed off the previous frame pointer by
775 reading the value from the register window save area. */
776 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
777 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
778 count--;
779 #endif
780
781 /* Scan back COUNT frames to the specified frame. */
782 for (i = 0; i < count; i++)
783 {
784 /* Assume the dynamic chain pointer is in the word that the
785 frame address points to, unless otherwise specified. */
786 #ifdef DYNAMIC_CHAIN_ADDRESS
787 tem = DYNAMIC_CHAIN_ADDRESS (tem);
788 #endif
789 tem = memory_address (Pmode, tem);
790 tem = gen_frame_mem (Pmode, tem);
791 tem = copy_to_reg (tem);
792 }
793
794 /* For __builtin_frame_address, return what we've got. But, on
795 the SPARC for example, we may have to add a bias. */
796 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
797 #ifdef FRAME_ADDR_RTX
798 return FRAME_ADDR_RTX (tem);
799 #else
800 return tem;
801 #endif
802
803 /* For __builtin_return_address, get the return address from that frame. */
804 #ifdef RETURN_ADDR_RTX
805 tem = RETURN_ADDR_RTX (count, tem);
806 #else
807 tem = memory_address (Pmode,
808 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
809 tem = gen_frame_mem (Pmode, tem);
810 #endif
811 return tem;
812 }
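
/* Added commentary (not part of the original source): this routine backs
   the user-level builtins, e.g.

     void *ra = __builtin_return_address (0);    <- our return address
     void *fp = __builtin_frame_address (1);     <- the caller's frame

   For COUNT > 0 the loop above walks the dynamic chain, which is only
   reliable if every intervening frame keeps a frame pointer.  */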
813
814 /* Alias set used for setjmp buffer. */
815 static alias_set_type setjmp_alias_set = -1;
816
817 /* Construct the leading half of a __builtin_setjmp call. Control will
818 return to RECEIVER_LABEL. This is also called directly by the SJLJ
819 exception handling code. */
820
821 void
822 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
823 {
824 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
825 rtx stack_save;
826 rtx mem;
827
828 if (setjmp_alias_set == -1)
829 setjmp_alias_set = new_alias_set ();
830
831 buf_addr = convert_memory_address (Pmode, buf_addr);
832
833 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
834
835 /* We store the frame pointer and the address of receiver_label in
836 the buffer and use the rest of it for the stack save area, which
837 is machine-dependent. */
838
839 mem = gen_rtx_MEM (Pmode, buf_addr);
840 set_mem_alias_set (mem, setjmp_alias_set);
841 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
842
843 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
844 GET_MODE_SIZE (Pmode)));
845 set_mem_alias_set (mem, setjmp_alias_set);
846
847 emit_move_insn (validize_mem (mem),
848 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
849
850 stack_save = gen_rtx_MEM (sa_mode,
851 plus_constant (Pmode, buf_addr,
852 2 * GET_MODE_SIZE (Pmode)));
853 set_mem_alias_set (stack_save, setjmp_alias_set);
854 emit_stack_save (SAVE_NONLOCAL, &stack_save);
855
856 /* If there is further processing to do, do it. */
857 #ifdef HAVE_builtin_setjmp_setup
858 if (HAVE_builtin_setjmp_setup)
859 emit_insn (gen_builtin_setjmp_setup (buf_addr));
860 #endif
861
862 /* We have a nonlocal label. */
863 cfun->has_nonlocal_label = 1;
864 }
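
/* Added commentary (not part of the original source): the resulting
   jmp_buf layout, as stored above, is

     word 0         frame value (targetm.builtin_setjmp_frame_value)
     word 1         address of RECEIVER_LABEL
     words 2 ...    stack save area, in sa_mode  */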
865
866 /* Construct the trailing part of a __builtin_setjmp call. This is
867 also called directly by the SJLJ exception handling code.
868 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
869
870 void
871 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
872 {
873 rtx chain;
874
875 /* Mark the FP as used when we get here, so we have to make sure it's
876 marked as used by this function. */
877 emit_use (hard_frame_pointer_rtx);
878
879 /* Mark the static chain as clobbered here so life information
880 doesn't get messed up for it. */
881 chain = targetm.calls.static_chain (current_function_decl, true);
882 if (chain && REG_P (chain))
883 emit_clobber (chain);
884
885 /* Now put in the code to restore the frame pointer, and argument
886 pointer, if needed. */
887 #ifdef HAVE_nonlocal_goto
888 if (! HAVE_nonlocal_goto)
889 #endif
890 {
891 /* First adjust our frame pointer to its actual value. It was
892 previously set to the start of the virtual area corresponding to
893 the stacked variables when we branched here and now needs to be
894 adjusted to the actual hardware fp value.
895
896 Assignments to virtual registers are converted by
897 instantiate_virtual_regs into the corresponding assignment
898 to the underlying register (fp in this case) that makes
899 the original assignment true.
900 So the following insn will actually be decrementing fp by
901 STARTING_FRAME_OFFSET. */
902 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
903
904 /* Restoring the frame pointer also modifies the hard frame pointer.
905 Mark it used (so that the previous assignment remains live once
906 the frame pointer is eliminated) and clobbered (to represent the
907 implicit update from the assignment). */
908 emit_use (hard_frame_pointer_rtx);
909 emit_clobber (hard_frame_pointer_rtx);
910 }
911
912 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
913 if (fixed_regs[ARG_POINTER_REGNUM])
914 {
915 #ifdef ELIMINABLE_REGS
916 /* If the argument pointer can be eliminated in favor of the
917 frame pointer, we don't need to restore it. We assume here
918 that if such an elimination is present, it can always be used.
919 This is the case on all known machines; if we don't make this
920 assumption, we do unnecessary saving on many machines. */
921 size_t i;
922 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
923
924 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
925 if (elim_regs[i].from == ARG_POINTER_REGNUM
926 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
927 break;
928
929 if (i == ARRAY_SIZE (elim_regs))
930 #endif
931 {
932 /* Now restore our arg pointer from the address at which it
933 was saved in our stack frame. */
934 emit_move_insn (crtl->args.internal_arg_pointer,
935 copy_to_reg (get_arg_pointer_save_area ()));
936 }
937 }
938 #endif
939
940 #ifdef HAVE_builtin_setjmp_receiver
941 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
942 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
943 else
944 #endif
945 #ifdef HAVE_nonlocal_goto_receiver
946 if (HAVE_nonlocal_goto_receiver)
947 emit_insn (gen_nonlocal_goto_receiver ());
948 else
949 #endif
950 { /* Nothing */ }
951
952 /* We must not allow the code we just generated to be reordered by
953 scheduling. Specifically, the update of the frame pointer must
954 happen immediately, not later. */
955 emit_insn (gen_blockage ());
956 }
957
958 /* __builtin_longjmp is passed a pointer to an array of five words (not
959 all will be used on all machines). It operates similarly to the C
960 library function of the same name, but is more efficient. Much of
961 the code below is copied from the handling of non-local gotos. */
962
963 static void
964 expand_builtin_longjmp (rtx buf_addr, rtx value)
965 {
966 rtx fp, lab, stack;
967 rtx_insn *insn, *last;
968 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
969
970 /* DRAP is needed for stack realign if longjmp is expanded to current
971 function */
972 if (SUPPORTS_STACK_ALIGNMENT)
973 crtl->need_drap = true;
974
975 if (setjmp_alias_set == -1)
976 setjmp_alias_set = new_alias_set ();
977
978 buf_addr = convert_memory_address (Pmode, buf_addr);
979
980 buf_addr = force_reg (Pmode, buf_addr);
981
982 /* We require that the user pass a second argument of 1, because
983 that is what builtin_setjmp will return. */
984 gcc_assert (value == const1_rtx);
985
986 last = get_last_insn ();
987 #ifdef HAVE_builtin_longjmp
988 if (HAVE_builtin_longjmp)
989 emit_insn (gen_builtin_longjmp (buf_addr));
990 else
991 #endif
992 {
993 fp = gen_rtx_MEM (Pmode, buf_addr);
994 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
995 GET_MODE_SIZE (Pmode)));
996
997 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
998 2 * GET_MODE_SIZE (Pmode)));
999 set_mem_alias_set (fp, setjmp_alias_set);
1000 set_mem_alias_set (lab, setjmp_alias_set);
1001 set_mem_alias_set (stack, setjmp_alias_set);
1002
1003 /* Pick up FP, label, and SP from the block and jump. This code is
1004 from expand_goto in stmt.c; see there for detailed comments. */
1005 #ifdef HAVE_nonlocal_goto
1006 if (HAVE_nonlocal_goto)
1007 /* We have to pass a value to the nonlocal_goto pattern that will
1008 get copied into the static_chain pointer, but it does not matter
1009 what that value is, because builtin_setjmp does not use it. */
1010 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1011 else
1012 #endif
1013 {
1014 lab = copy_to_reg (lab);
1015
1016 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1017 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1018
1019 emit_move_insn (hard_frame_pointer_rtx, fp);
1020 emit_stack_restore (SAVE_NONLOCAL, stack);
1021
1022 emit_use (hard_frame_pointer_rtx);
1023 emit_use (stack_pointer_rtx);
1024 emit_indirect_jump (lab);
1025 }
1026 }
1027
1028 /* Search backwards and mark the jump insn as a non-local goto.
1029 Note that this precludes the use of __builtin_longjmp to a
1030 __builtin_setjmp target in the same function. However, we've
1031 already cautioned the user that these functions are for
1032 internal exception handling use only. */
1033 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1034 {
1035 gcc_assert (insn != last);
1036
1037 if (JUMP_P (insn))
1038 {
1039 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1040 break;
1041 }
1042 else if (CALL_P (insn))
1043 break;
1044 }
1045 }
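
/* Added commentary (not part of the original source): a minimal usage
   sketch of these internal builtins.  Because a __builtin_longjmp to a
   __builtin_setjmp target in the same function is precluded (see the
   REG_NON_LOCAL_GOTO note above), the jump must come from elsewhere,
   e.g. with BUF global:

     void *buf[5];
     void f (void) { __builtin_longjmp (buf, 1); }

     if (__builtin_setjmp (buf) == 0)
       f ();

   The second argument to __builtin_longjmp must be the constant 1, which
   is exactly what the gcc_assert above enforces.  */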
1046
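/* Return true if the argument iterator ITER has not yet reached the end
   of the argument list, i.e. more arguments remain to be visited.  */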
1047 static inline bool
1048 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1049 {
1050 return (iter->i < iter->n);
1051 }
1052
1053 /* This function validates the types of a function call argument list
1054 against a specified list of tree_codes. If the last specifier is a 0,
1055 that represents an ellipsis; otherwise the last specifier must be a
1056 VOID_TYPE. */
1057
1058 static bool
1059 validate_arglist (const_tree callexpr, ...)
1060 {
1061 enum tree_code code;
1062 bool res = false;
1063 va_list ap;
1064 const_call_expr_arg_iterator iter;
1065 const_tree arg;
1066
1067 va_start (ap, callexpr);
1068 init_const_call_expr_arg_iterator (callexpr, &iter);
1069
1070 do
1071 {
1072 code = (enum tree_code) va_arg (ap, int);
1073 switch (code)
1074 {
1075 case 0:
1076 /* This signifies an ellipsis; any further arguments are all ok. */
1077 res = true;
1078 goto end;
1079 case VOID_TYPE:
1080 /* This signifies an endlink, if no arguments remain, return
1081 true, otherwise return false. */
1082 res = !more_const_call_expr_args_p (&iter);
1083 goto end;
1084 default:
1085 /* If no parameters remain or the parameter's code does not
1086 match the specified code, return false. Otherwise continue
1087 checking any remaining arguments. */
1088 arg = next_const_call_expr_arg (&iter);
1089 if (!validate_arg (arg, code))
1090 goto end;
1091 break;
1092 }
1093 }
1094 while (1);
1095
1096 /* We need gotos here since we can only have one VA_CLOSE in a
1097 function. */
1098 end: ;
1099 va_end (ap);
1100
1101 return res;
1102 }
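
/* Added commentary (not part of the original source): a memcpy-style call
   expression would be checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)

   while passing a trailing 0 instead of VOID_TYPE accepts any further
   arguments, as for printf-like builtins.  */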
1103
1104 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1105 and the address of the save area. */
1106
1107 static rtx
1108 expand_builtin_nonlocal_goto (tree exp)
1109 {
1110 tree t_label, t_save_area;
1111 rtx r_label, r_save_area, r_fp, r_sp;
1112 rtx_insn *insn;
1113
1114 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1115 return NULL_RTX;
1116
1117 t_label = CALL_EXPR_ARG (exp, 0);
1118 t_save_area = CALL_EXPR_ARG (exp, 1);
1119
1120 r_label = expand_normal (t_label);
1121 r_label = convert_memory_address (Pmode, r_label);
1122 r_save_area = expand_normal (t_save_area);
1123 r_save_area = convert_memory_address (Pmode, r_save_area);
1124 /* Copy the address of the save location to a register just in case it was
1125 based on the frame pointer. */
1126 r_save_area = copy_to_reg (r_save_area);
1127 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1128 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1129 plus_constant (Pmode, r_save_area,
1130 GET_MODE_SIZE (Pmode)));
1131
1132 crtl->has_nonlocal_goto = 1;
1133
1134 #ifdef HAVE_nonlocal_goto
1135 /* ??? We no longer need to pass the static chain value, afaik. */
1136 if (HAVE_nonlocal_goto)
1137 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1138 else
1139 #endif
1140 {
1141 r_label = copy_to_reg (r_label);
1142
1143 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1144 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1145
1146 /* Restore frame pointer for containing function. */
1147 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1148 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1149
1150 /* USE of hard_frame_pointer_rtx added for consistency;
1151 not clear if really needed. */
1152 emit_use (hard_frame_pointer_rtx);
1153 emit_use (stack_pointer_rtx);
1154
1155 /* If the architecture is using a GP register, we must
1156 conservatively assume that the target function makes use of it.
1157 The prologue of functions with nonlocal gotos must therefore
1158 initialize the GP register to the appropriate value, and we
1159 must then make sure that this value is live at the point
1160 of the jump. (Note that this doesn't necessarily apply
1161 to targets with a nonlocal_goto pattern; they are free
1162 to implement it in their own way. Note also that this is
1163 a no-op if the GP register is a global invariant.) */
1164 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1165 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1166 emit_use (pic_offset_table_rtx);
1167
1168 emit_indirect_jump (r_label);
1169 }
1170
1171 /* Search backwards to the jump insn and mark it as a
1172 non-local goto. */
1173 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1174 {
1175 if (JUMP_P (insn))
1176 {
1177 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1178 break;
1179 }
1180 else if (CALL_P (insn))
1181 break;
1182 }
1183
1184 return const0_rtx;
1185 }
1186
1187 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1188 (not all will be used on all machines) that was passed to __builtin_setjmp.
1189 It updates the stack pointer in that block to correspond to the current
1190 stack pointer. */
1191
1192 static void
1193 expand_builtin_update_setjmp_buf (rtx buf_addr)
1194 {
1195 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1196 rtx stack_save
1197 = gen_rtx_MEM (sa_mode,
1198 memory_address
1199 (sa_mode,
1200 plus_constant (Pmode, buf_addr,
1201 2 * GET_MODE_SIZE (Pmode))));
1202
1203 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1204 }
1205
1206 /* Expand a call to __builtin_prefetch. For a target that does not support
1207 data prefetch, evaluate the memory address argument in case it has side
1208 effects. */
1209
1210 static void
1211 expand_builtin_prefetch (tree exp)
1212 {
1213 tree arg0, arg1, arg2;
1214 int nargs;
1215 rtx op0, op1, op2;
1216
1217 if (!validate_arglist (exp, POINTER_TYPE, 0))
1218 return;
1219
1220 arg0 = CALL_EXPR_ARG (exp, 0);
1221
1222 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1223 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1224 locality). */
1225 nargs = call_expr_nargs (exp);
1226 if (nargs > 1)
1227 arg1 = CALL_EXPR_ARG (exp, 1);
1228 else
1229 arg1 = integer_zero_node;
1230 if (nargs > 2)
1231 arg2 = CALL_EXPR_ARG (exp, 2);
1232 else
1233 arg2 = integer_three_node;
1234
1235 /* Argument 0 is an address. */
1236 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1237
1238 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1239 if (TREE_CODE (arg1) != INTEGER_CST)
1240 {
1241 error ("second argument to %<__builtin_prefetch%> must be a constant");
1242 arg1 = integer_zero_node;
1243 }
1244 op1 = expand_normal (arg1);
1245 /* Argument 1 must be either zero or one. */
1246 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1247 {
1248 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1249 " using zero");
1250 op1 = const0_rtx;
1251 }
1252
1253 /* Argument 2 (locality) must be a compile-time constant int. */
1254 if (TREE_CODE (arg2) != INTEGER_CST)
1255 {
1256 error ("third argument to %<__builtin_prefetch%> must be a constant");
1257 arg2 = integer_zero_node;
1258 }
1259 op2 = expand_normal (arg2);
1260 /* Argument 2 must be 0, 1, 2, or 3. */
1261 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1262 {
1263 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1264 op2 = const0_rtx;
1265 }
1266
1267 #ifdef HAVE_prefetch
1268 if (HAVE_prefetch)
1269 {
1270 struct expand_operand ops[3];
1271
1272 create_address_operand (&ops[0], op0);
1273 create_integer_operand (&ops[1], INTVAL (op1));
1274 create_integer_operand (&ops[2], INTVAL (op2));
1275 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1276 return;
1277 }
1278 #endif
1279
1280 /* Don't do anything with direct references to volatile memory, but
1281 generate code to handle other side effects. */
1282 if (!MEM_P (op0) && side_effects_p (op0))
1283 emit_insn (op0);
1284 }
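
/* Added commentary (not part of the original source): user-level usage,
   matching the defaults and checks above:

     __builtin_prefetch (&a[i + 8]);          rw defaults to 0, locality to 3
     __builtin_prefetch (&a[i + 8], 1, 0);    prefetch once, for writing

   Both optional arguments must be compile-time integer constants.  */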
1285
1286 /* Get a MEM rtx for expression EXP which is the address of an operand
1287 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1288 the maximum length of the block of memory that might be accessed or
1289 NULL if unknown. */
1290
1291 static rtx
1292 get_memory_rtx (tree exp, tree len)
1293 {
1294 tree orig_exp = exp;
1295 rtx addr, mem;
1296
1297 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1298 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1299 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1300 exp = TREE_OPERAND (exp, 0);
1301
1302 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1303 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1304
1305 /* Get an expression we can use to find the attributes to assign to MEM.
1306 First remove any nops. */
1307 while (CONVERT_EXPR_P (exp)
1308 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1309 exp = TREE_OPERAND (exp, 0);
1310
1311 /* Build a MEM_REF representing the whole accessed area as a byte blob
1312 (as builtin stringops may alias with anything). */
1313 exp = fold_build2 (MEM_REF,
1314 build_array_type (char_type_node,
1315 build_range_type (sizetype,
1316 size_one_node, len)),
1317 exp, build_int_cst (ptr_type_node, 0));
1318
1319 /* If the MEM_REF has no acceptable address, try to get the base object
1320 from the original address we got, and build an all-aliasing
1321 unknown-sized access to that one. */
1322 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1323 set_mem_attributes (mem, exp, 0);
1324 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1325 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1326 0))))
1327 {
1328 exp = build_fold_addr_expr (exp);
1329 exp = fold_build2 (MEM_REF,
1330 build_array_type (char_type_node,
1331 build_range_type (sizetype,
1332 size_zero_node,
1333 NULL)),
1334 exp, build_int_cst (ptr_type_node, 0));
1335 set_mem_attributes (mem, exp, 0);
1336 }
1337 set_mem_alias_set (mem, 0);
1338 return mem;
1339 }
1340 \f
1341 /* Built-in functions to perform an untyped call and return. */
1342
1343 #define apply_args_mode \
1344 (this_target_builtins->x_apply_args_mode)
1345 #define apply_result_mode \
1346 (this_target_builtins->x_apply_result_mode)
1347
1348 /* Return the size required for the block returned by __builtin_apply_args,
1349 and initialize apply_args_mode. */
1350
1351 static int
1352 apply_args_size (void)
1353 {
1354 static int size = -1;
1355 int align;
1356 unsigned int regno;
1357 enum machine_mode mode;
1358
1359 /* The values computed by this function never change. */
1360 if (size < 0)
1361 {
1362 /* The first value is the incoming arg-pointer. */
1363 size = GET_MODE_SIZE (Pmode);
1364
1365 /* The second value is the structure value address unless this is
1366 passed as an "invisible" first argument. */
1367 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1368 size += GET_MODE_SIZE (Pmode);
1369
1370 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1371 if (FUNCTION_ARG_REGNO_P (regno))
1372 {
1373 mode = targetm.calls.get_raw_arg_mode (regno);
1374
1375 gcc_assert (mode != VOIDmode);
1376
1377 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1378 if (size % align != 0)
1379 size = CEIL (size, align) * align;
1380 size += GET_MODE_SIZE (mode);
1381 apply_args_mode[regno] = mode;
1382 }
1383 else
1384 {
1385 apply_args_mode[regno] = VOIDmode;
1386 }
1387 }
1388 return size;
1389 }
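
/* Added commentary (not part of the original source): a layout sketch of
   the block sized above, for a hypothetical 32-bit target with two SImode
   argument registers and a struct value address that is not passed as an
   invisible first argument:

     offset 0     incoming arg pointer (Pmode)
     offset 4     structure value address (Pmode)
     offset 8     first argument register (SImode)
     offset 12    second argument register (SImode)

   Each slot is first rounded up to its mode's alignment.  */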
1390
1391 /* Return the size required for the block returned by __builtin_apply,
1392 and initialize apply_result_mode. */
1393
1394 static int
1395 apply_result_size (void)
1396 {
1397 static int size = -1;
1398 int align, regno;
1399 enum machine_mode mode;
1400
1401 /* The values computed by this function never change. */
1402 if (size < 0)
1403 {
1404 size = 0;
1405
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if (targetm.calls.function_value_regno_p (regno))
1408 {
1409 mode = targetm.calls.get_raw_result_mode (regno);
1410
1411 gcc_assert (mode != VOIDmode);
1412
1413 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1414 if (size % align != 0)
1415 size = CEIL (size, align) * align;
1416 size += GET_MODE_SIZE (mode);
1417 apply_result_mode[regno] = mode;
1418 }
1419 else
1420 apply_result_mode[regno] = VOIDmode;
1421
1422 /* Allow targets that use untyped_call and untyped_return to override
1423 the size so that machine-specific information can be stored here. */
1424 #ifdef APPLY_RESULT_SIZE
1425 size = APPLY_RESULT_SIZE;
1426 #endif
1427 }
1428 return size;
1429 }
1430
1431 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1432 /* Create a vector describing the result block RESULT. If SAVEP is true,
1433 the result block is used to save the values; otherwise it is used to
1434 restore the values. */
1435
1436 static rtx
1437 result_vector (int savep, rtx result)
1438 {
1439 int regno, size, align, nelts;
1440 enum machine_mode mode;
1441 rtx reg, mem;
1442 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1443
1444 size = nelts = 0;
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if ((mode = apply_result_mode[regno]) != VOIDmode)
1447 {
1448 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1449 if (size % align != 0)
1450 size = CEIL (size, align) * align;
1451 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1452 mem = adjust_address (result, mode, size);
1453 savevec[nelts++] = (savep
1454 ? gen_rtx_SET (VOIDmode, mem, reg)
1455 : gen_rtx_SET (VOIDmode, reg, mem));
1456 size += GET_MODE_SIZE (mode);
1457 }
1458 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1459 }
1460 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1461
1462 /* Save the state required to perform an untyped call with the same
1463 arguments as were passed to the current function. */
1464
1465 static rtx
1466 expand_builtin_apply_args_1 (void)
1467 {
1468 rtx registers, tem;
1469 int size, align, regno;
1470 enum machine_mode mode;
1471 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1472
1473 /* Create a block where the arg-pointer, structure value address,
1474 and argument registers can be saved. */
1475 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1476
1477 /* Walk past the arg-pointer and structure value address. */
1478 size = GET_MODE_SIZE (Pmode);
1479 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1480 size += GET_MODE_SIZE (Pmode);
1481
1482 /* Save each register used in calling a function to the block. */
1483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1484 if ((mode = apply_args_mode[regno]) != VOIDmode)
1485 {
1486 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1487 if (size % align != 0)
1488 size = CEIL (size, align) * align;
1489
1490 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1491
1492 emit_move_insn (adjust_address (registers, mode, size), tem);
1493 size += GET_MODE_SIZE (mode);
1494 }
1495
1496 /* Save the arg pointer to the block. */
1497 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1498 #ifdef STACK_GROWS_DOWNWARD
1499 /* We need the pointer as the caller actually passed them to us, not
1500 as we might have pretended they were passed. Make sure it's a valid
1501 operand, as emit_move_insn isn't expected to handle a PLUS. */
1502 tem
1503 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1504 NULL_RTX);
1505 #endif
1506 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1507
1508 size = GET_MODE_SIZE (Pmode);
1509
1510 /* Save the structure value address unless this is passed as an
1511 "invisible" first argument. */
1512 if (struct_incoming_value)
1513 {
1514 emit_move_insn (adjust_address (registers, Pmode, size),
1515 copy_to_reg (struct_incoming_value));
1516 size += GET_MODE_SIZE (Pmode);
1517 }
1518
1519 /* Return the address of the block. */
1520 return copy_addr_to_reg (XEXP (registers, 0));
1521 }
1522
1523 /* __builtin_apply_args returns a block of memory allocated on
1524 the stack into which are stored the arg pointer, structure
1525 value address, static chain, and all the registers that might
1526 possibly be used in performing a function call. The code is
1527 moved to the start of the function so the incoming values are
1528 saved. */
1529
1530 static rtx
1531 expand_builtin_apply_args (void)
1532 {
1533 /* Don't do __builtin_apply_args more than once in a function.
1534 Save the result of the first call and reuse it. */
1535 if (apply_args_value != 0)
1536 return apply_args_value;
1537 {
1538 /* When this function is called, it means that registers must be
1539 saved on entry to this function. So we migrate the
1540 call to the first insn of this function. */
1541 rtx temp;
1542 rtx seq;
1543
1544 start_sequence ();
1545 temp = expand_builtin_apply_args_1 ();
1546 seq = get_insns ();
1547 end_sequence ();
1548
1549 apply_args_value = temp;
1550
1551 /* Put the insns after the NOTE that starts the function.
1552 If this is inside a start_sequence, make the outer-level insn
1553 chain current, so the code is placed at the start of the
1554 function. If internal_arg_pointer is a non-virtual pseudo,
1555 it needs to be placed after the function that initializes
1556 that pseudo. */
1557 push_topmost_sequence ();
1558 if (REG_P (crtl->args.internal_arg_pointer)
1559 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1560 emit_insn_before (seq, parm_birth_insn);
1561 else
1562 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1563 pop_topmost_sequence ();
1564 return temp;
1565 }
1566 }
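
/* Added commentary (not part of the original source): the untyped call
   machinery is exposed to users as, e.g.,

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (res);

   where 64 is a caller-supplied upper bound on the size of the argument
   block and target_fn is a placeholder for the forwarded-to function.  */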
1567
1568 /* Perform an untyped call and save the state required to perform an
1569 untyped return of whatever value was returned by the given function. */
1570
1571 static rtx
1572 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1573 {
1574 int size, align, regno;
1575 enum machine_mode mode;
1576 rtx incoming_args, result, reg, dest, src;
1577 rtx_call_insn *call_insn;
1578 rtx old_stack_level = 0;
1579 rtx call_fusage = 0;
1580 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1581
1582 arguments = convert_memory_address (Pmode, arguments);
1583
1584 /* Create a block where the return registers can be saved. */
1585 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1586
1587 /* Fetch the arg pointer from the ARGUMENTS block. */
1588 incoming_args = gen_reg_rtx (Pmode);
1589 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1590 #ifndef STACK_GROWS_DOWNWARD
1591 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1592 incoming_args, 0, OPTAB_LIB_WIDEN);
1593 #endif
1594
1595 /* Push a new argument block and copy the arguments. Do not allow
1596 the (potential) memcpy call below to interfere with our stack
1597 manipulations. */
1598 do_pending_stack_adjust ();
1599 NO_DEFER_POP;
1600
1601 /* Save the stack with nonlocal if available. */
1602 #ifdef HAVE_save_stack_nonlocal
1603 if (HAVE_save_stack_nonlocal)
1604 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1605 else
1606 #endif
1607 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1608
1609 /* Allocate a block of memory onto the stack and copy the memory
1610 arguments to the outgoing arguments address. We can pass TRUE
1611 as the 4th argument because we just saved the stack pointer
1612 and will restore it right after the call. */
1613 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1614
1615 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1616 may have already set current_function_calls_alloca to true.
1617 current_function_calls_alloca won't be set if argsize is zero,
1618 so we have to guarantee need_drap is true here. */
1619 if (SUPPORTS_STACK_ALIGNMENT)
1620 crtl->need_drap = true;
1621
1622 dest = virtual_outgoing_args_rtx;
1623 #ifndef STACK_GROWS_DOWNWARD
1624 if (CONST_INT_P (argsize))
1625 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1626 else
1627 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1628 #endif
1629 dest = gen_rtx_MEM (BLKmode, dest);
1630 set_mem_align (dest, PARM_BOUNDARY);
1631 src = gen_rtx_MEM (BLKmode, incoming_args);
1632 set_mem_align (src, PARM_BOUNDARY);
1633 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1634
1635 /* Refer to the argument block. */
1636 apply_args_size ();
1637 arguments = gen_rtx_MEM (BLKmode, arguments);
1638 set_mem_align (arguments, PARM_BOUNDARY);
1639
1640 /* Walk past the arg-pointer and structure value address. */
1641 size = GET_MODE_SIZE (Pmode);
1642 if (struct_value)
1643 size += GET_MODE_SIZE (Pmode);
1644
1645 /* Restore each of the registers previously saved. Make USE insns
1646 for each of these registers for use in making the call. */
1647 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1648 if ((mode = apply_args_mode[regno]) != VOIDmode)
1649 {
1650 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1651 if (size % align != 0)
1652 size = CEIL (size, align) * align;
1653 reg = gen_rtx_REG (mode, regno);
1654 emit_move_insn (reg, adjust_address (arguments, mode, size));
1655 use_reg (&call_fusage, reg);
1656 size += GET_MODE_SIZE (mode);
1657 }
1658
1659 /* Restore the structure value address unless this is passed as an
1660 "invisible" first argument. */
1661 size = GET_MODE_SIZE (Pmode);
1662 if (struct_value)
1663 {
1664 rtx value = gen_reg_rtx (Pmode);
1665 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1666 emit_move_insn (struct_value, value);
1667 if (REG_P (struct_value))
1668 use_reg (&call_fusage, struct_value);
1669 size += GET_MODE_SIZE (Pmode);
1670 }
1671
1672 /* All arguments and registers used for the call are set up by now! */
1673 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1674
1675 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1676 needs to be done, and we don't want to load it into a register as an
1677 optimization, because prepare_call_address already did that if needed. */
1678 if (GET_CODE (function) != SYMBOL_REF)
1679 function = memory_address (FUNCTION_MODE, function);
1680
1681 /* Generate the actual call instruction and save the return value. */
1682 #ifdef HAVE_untyped_call
1683 if (HAVE_untyped_call)
1684 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1685 result, result_vector (1, result)));
1686 else
1687 #endif
1688 #ifdef HAVE_call_value
1689 if (HAVE_call_value)
1690 {
1691 rtx valreg = 0;
1692
1693 /* Locate the unique return register. It is not possible to
1694 express a call that sets more than one return register using
1695 call_value; use untyped_call for that. In fact, untyped_call
1696 only needs to save the return registers in the given block. */
1697 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1698 if ((mode = apply_result_mode[regno]) != VOIDmode)
1699 {
1700 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1701
1702 valreg = gen_rtx_REG (mode, regno);
1703 }
1704
1705 emit_call_insn (GEN_CALL_VALUE (valreg,
1706 gen_rtx_MEM (FUNCTION_MODE, function),
1707 const0_rtx, NULL_RTX, const0_rtx));
1708
1709 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1710 }
1711 else
1712 #endif
1713 gcc_unreachable ();
1714
1715 /* Find the CALL insn we just emitted, and attach the register usage
1716 information. */
1717 call_insn = last_call_insn ();
1718 add_function_usage_to (call_insn, call_fusage);
1719
1720 /* Restore the stack. */
1721 #ifdef HAVE_save_stack_nonlocal
1722 if (HAVE_save_stack_nonlocal)
1723 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1724 else
1725 #endif
1726 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1727 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1728
1729 OK_DEFER_POP;
1730
1731 /* Return the address of the result block. */
1732 result = copy_addr_to_reg (XEXP (result, 0));
1733 return convert_memory_address (ptr_mode, result);
1734 }
1735
1736 /* Perform an untyped return. */
1737
1738 static void
1739 expand_builtin_return (rtx result)
1740 {
1741 int size, align, regno;
1742 enum machine_mode mode;
1743 rtx reg;
1744 rtx_insn *call_fusage = 0;
1745
1746 result = convert_memory_address (Pmode, result);
1747
1748 apply_result_size ();
1749 result = gen_rtx_MEM (BLKmode, result);
1750
1751 #ifdef HAVE_untyped_return
1752 if (HAVE_untyped_return)
1753 {
1754 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1755 emit_barrier ();
1756 return;
1757 }
1758 #endif
1759
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1764 {
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1770
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1776 }
1777
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1780
1781 /* Return whatever value was restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1784 }
1785
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1787
1788 static enum type_class
1789 type_to_class (tree type)
1790 {
1791 switch (TREE_CODE (type))
1792 {
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1811 }
1812 }
1813
1814 /* Expand a call EXP to __builtin_classify_type. */
1815
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1818 {
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
1822 }
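
/* For example, at the source level:

     __builtin_classify_type (0)           yields integer_type_class
     __builtin_classify_type (0.0)         yields real_type_class
     __builtin_classify_type ((void *) 0)  yields pointer_type_class

   with the enumeration values defined in typeclass.h.  */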
1823
1824 /* This helper macro, meant to be used in mathfn_built_in below,
1825 determines which among a set of three builtin math functions is
1826 appropriate for a given type mode. The `F' and `L' cases are
1827 automatically generated from the `double' case. */
1828 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1829 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1830 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1831 fcodel = BUILT_IN_MATHFN##L ; break;
1832 /* Similar to above, but appends _R after any F/L suffix. */
1833 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1835 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1836 fcodel = BUILT_IN_MATHFN##L_R ; break;
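
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */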
1837
1838 /* Return the mathematical function equivalent to FN but operating directly
1839 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1840 otherwise use the explicit declaration. If we can't do the conversion,
1841 return zero. */
1842
1843 static tree
1844 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1845 {
1846 enum built_in_function fcode, fcodef, fcodel, fcode2;
1847
1848 switch (fn)
1849 {
1850 CASE_MATHFN (BUILT_IN_ACOS)
1851 CASE_MATHFN (BUILT_IN_ACOSH)
1852 CASE_MATHFN (BUILT_IN_ASIN)
1853 CASE_MATHFN (BUILT_IN_ASINH)
1854 CASE_MATHFN (BUILT_IN_ATAN)
1855 CASE_MATHFN (BUILT_IN_ATAN2)
1856 CASE_MATHFN (BUILT_IN_ATANH)
1857 CASE_MATHFN (BUILT_IN_CBRT)
1858 CASE_MATHFN (BUILT_IN_CEIL)
1859 CASE_MATHFN (BUILT_IN_CEXPI)
1860 CASE_MATHFN (BUILT_IN_COPYSIGN)
1861 CASE_MATHFN (BUILT_IN_COS)
1862 CASE_MATHFN (BUILT_IN_COSH)
1863 CASE_MATHFN (BUILT_IN_DREM)
1864 CASE_MATHFN (BUILT_IN_ERF)
1865 CASE_MATHFN (BUILT_IN_ERFC)
1866 CASE_MATHFN (BUILT_IN_EXP)
1867 CASE_MATHFN (BUILT_IN_EXP10)
1868 CASE_MATHFN (BUILT_IN_EXP2)
1869 CASE_MATHFN (BUILT_IN_EXPM1)
1870 CASE_MATHFN (BUILT_IN_FABS)
1871 CASE_MATHFN (BUILT_IN_FDIM)
1872 CASE_MATHFN (BUILT_IN_FLOOR)
1873 CASE_MATHFN (BUILT_IN_FMA)
1874 CASE_MATHFN (BUILT_IN_FMAX)
1875 CASE_MATHFN (BUILT_IN_FMIN)
1876 CASE_MATHFN (BUILT_IN_FMOD)
1877 CASE_MATHFN (BUILT_IN_FREXP)
1878 CASE_MATHFN (BUILT_IN_GAMMA)
1879 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1880 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1881 CASE_MATHFN (BUILT_IN_HYPOT)
1882 CASE_MATHFN (BUILT_IN_ILOGB)
1883 CASE_MATHFN (BUILT_IN_ICEIL)
1884 CASE_MATHFN (BUILT_IN_IFLOOR)
1885 CASE_MATHFN (BUILT_IN_INF)
1886 CASE_MATHFN (BUILT_IN_IRINT)
1887 CASE_MATHFN (BUILT_IN_IROUND)
1888 CASE_MATHFN (BUILT_IN_ISINF)
1889 CASE_MATHFN (BUILT_IN_J0)
1890 CASE_MATHFN (BUILT_IN_J1)
1891 CASE_MATHFN (BUILT_IN_JN)
1892 CASE_MATHFN (BUILT_IN_LCEIL)
1893 CASE_MATHFN (BUILT_IN_LDEXP)
1894 CASE_MATHFN (BUILT_IN_LFLOOR)
1895 CASE_MATHFN (BUILT_IN_LGAMMA)
1896 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1897 CASE_MATHFN (BUILT_IN_LLCEIL)
1898 CASE_MATHFN (BUILT_IN_LLFLOOR)
1899 CASE_MATHFN (BUILT_IN_LLRINT)
1900 CASE_MATHFN (BUILT_IN_LLROUND)
1901 CASE_MATHFN (BUILT_IN_LOG)
1902 CASE_MATHFN (BUILT_IN_LOG10)
1903 CASE_MATHFN (BUILT_IN_LOG1P)
1904 CASE_MATHFN (BUILT_IN_LOG2)
1905 CASE_MATHFN (BUILT_IN_LOGB)
1906 CASE_MATHFN (BUILT_IN_LRINT)
1907 CASE_MATHFN (BUILT_IN_LROUND)
1908 CASE_MATHFN (BUILT_IN_MODF)
1909 CASE_MATHFN (BUILT_IN_NAN)
1910 CASE_MATHFN (BUILT_IN_NANS)
1911 CASE_MATHFN (BUILT_IN_NEARBYINT)
1912 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1913 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1914 CASE_MATHFN (BUILT_IN_POW)
1915 CASE_MATHFN (BUILT_IN_POWI)
1916 CASE_MATHFN (BUILT_IN_POW10)
1917 CASE_MATHFN (BUILT_IN_REMAINDER)
1918 CASE_MATHFN (BUILT_IN_REMQUO)
1919 CASE_MATHFN (BUILT_IN_RINT)
1920 CASE_MATHFN (BUILT_IN_ROUND)
1921 CASE_MATHFN (BUILT_IN_SCALB)
1922 CASE_MATHFN (BUILT_IN_SCALBLN)
1923 CASE_MATHFN (BUILT_IN_SCALBN)
1924 CASE_MATHFN (BUILT_IN_SIGNBIT)
1925 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1926 CASE_MATHFN (BUILT_IN_SIN)
1927 CASE_MATHFN (BUILT_IN_SINCOS)
1928 CASE_MATHFN (BUILT_IN_SINH)
1929 CASE_MATHFN (BUILT_IN_SQRT)
1930 CASE_MATHFN (BUILT_IN_TAN)
1931 CASE_MATHFN (BUILT_IN_TANH)
1932 CASE_MATHFN (BUILT_IN_TGAMMA)
1933 CASE_MATHFN (BUILT_IN_TRUNC)
1934 CASE_MATHFN (BUILT_IN_Y0)
1935 CASE_MATHFN (BUILT_IN_Y1)
1936 CASE_MATHFN (BUILT_IN_YN)
1937
1938 default:
1939 return NULL_TREE;
1940 }
1941
1942 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1943 fcode2 = fcode;
1944 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1945 fcode2 = fcodef;
1946 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1947 fcode2 = fcodel;
1948 else
1949 return NULL_TREE;
1950
1951 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1952 return NULL_TREE;
1953
1954 return builtin_decl_explicit (fcode2);
1955 }
1956
1957 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1958
1959 tree
1960 mathfn_built_in (tree type, enum built_in_function fn)
1961 {
1962 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1963 }
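
/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the
   declaration of sinf when the implicit builtin is available, and
   NULL_TREE otherwise.  */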
1964
1965 /* If errno must be maintained, expand the RTL to check if the result,
1966 TARGET, of a built-in function call, EXP, is NaN, and if so set
1967 errno to EDOM. */
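
/* Roughly, for a call sqrt (x) compiled with -fmath-errno, the
   expansion produced here behaves like:

     r = sqrt_insn (x);
     if (r == r)
       goto done;       // taken unless r is NaN
     errno = EDOM;      // or re-issue the library call
   done:;

   using a self-comparison to detect a NaN result.  */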
1968
1969 static void
1970 expand_errno_check (tree exp, rtx target)
1971 {
1972 rtx_code_label *lab = gen_label_rtx ();
1973
1974 /* Test the result; if it is NaN, set errno=EDOM because
1975 the argument was not in the domain. */
1976 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1977 NULL_RTX, NULL_RTX, lab,
1978 /* The jump is very likely. */
1979 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1980
1981 #ifdef TARGET_EDOM
1982 /* If this built-in doesn't throw an exception, set errno directly. */
1983 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1984 {
1985 #ifdef GEN_ERRNO_RTX
1986 rtx errno_rtx = GEN_ERRNO_RTX;
1987 #else
1988 rtx errno_rtx
1989 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1990 #endif
1991 emit_move_insn (errno_rtx,
1992 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1993 emit_label (lab);
1994 return;
1995 }
1996 #endif
1997
1998 /* Make sure the library call isn't expanded as a tail call. */
1999 CALL_EXPR_TAILCALL (exp) = 0;
2000
2001 /* We can't set errno=EDOM directly; let the library call do it.
2002 Pop the arguments right away in case the call gets deleted. */
2003 NO_DEFER_POP;
2004 expand_call (exp, target, 0);
2005 OK_DEFER_POP;
2006 emit_label (lab);
2007 }
2008
2009 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2010 Return NULL_RTX if a normal call should be emitted rather than expanding
2011 the function in-line. EXP is the expression that is a call to the builtin
2012 function; if convenient, the result should be placed in TARGET.
2013 SUBTARGET may be used as the target for computing one of EXP's operands. */
2014
2015 static rtx
2016 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2017 {
2018 optab builtin_optab;
2019 rtx op0;
2020 rtx_insn *insns;
2021 tree fndecl = get_callee_fndecl (exp);
2022 enum machine_mode mode;
2023 bool errno_set = false;
2024 bool try_widening = false;
2025 tree arg;
2026
2027 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2028 return NULL_RTX;
2029
2030 arg = CALL_EXPR_ARG (exp, 0);
2031
2032 switch (DECL_FUNCTION_CODE (fndecl))
2033 {
2034 CASE_FLT_FN (BUILT_IN_SQRT):
2035 errno_set = ! tree_expr_nonnegative_p (arg);
2036 try_widening = true;
2037 builtin_optab = sqrt_optab;
2038 break;
2039 CASE_FLT_FN (BUILT_IN_EXP):
2040 errno_set = true; builtin_optab = exp_optab; break;
2041 CASE_FLT_FN (BUILT_IN_EXP10):
2042 CASE_FLT_FN (BUILT_IN_POW10):
2043 errno_set = true; builtin_optab = exp10_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXP2):
2045 errno_set = true; builtin_optab = exp2_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXPM1):
2047 errno_set = true; builtin_optab = expm1_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOGB):
2049 errno_set = true; builtin_optab = logb_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG):
2051 errno_set = true; builtin_optab = log_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG10):
2053 errno_set = true; builtin_optab = log10_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG2):
2055 errno_set = true; builtin_optab = log2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG1P):
2057 errno_set = true; builtin_optab = log1p_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ASIN):
2059 builtin_optab = asin_optab; break;
2060 CASE_FLT_FN (BUILT_IN_ACOS):
2061 builtin_optab = acos_optab; break;
2062 CASE_FLT_FN (BUILT_IN_TAN):
2063 builtin_optab = tan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN):
2065 builtin_optab = atan_optab; break;
2066 CASE_FLT_FN (BUILT_IN_FLOOR):
2067 builtin_optab = floor_optab; break;
2068 CASE_FLT_FN (BUILT_IN_CEIL):
2069 builtin_optab = ceil_optab; break;
2070 CASE_FLT_FN (BUILT_IN_TRUNC):
2071 builtin_optab = btrunc_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ROUND):
2073 builtin_optab = round_optab; break;
2074 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2075 builtin_optab = nearbyint_optab;
2076 if (flag_trapping_math)
2077 break;
2078 /* Else fall through and expand as rint. */
2079 CASE_FLT_FN (BUILT_IN_RINT):
2080 builtin_optab = rint_optab; break;
2081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2082 builtin_optab = significand_optab; break;
2083 default:
2084 gcc_unreachable ();
2085 }
2086
2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
2089
2090 if (! flag_errno_math || ! HONOR_NANS (mode))
2091 errno_set = false;
2092
2093 /* Before working hard, check whether the instruction is available, but try
2094 to widen the mode for specific operations. */
2095 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2096 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2097 && (!errno_set || !optimize_insn_for_size_p ()))
2098 {
2099 rtx result = gen_reg_rtx (mode);
2100
2101 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2102 need to expand the argument again. This way, we will not perform
2103 side-effects more than once. */
2104 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2105
2106 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2107
2108 start_sequence ();
2109
2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 result = expand_unop (mode, builtin_optab, op0, result, 0);
2113
2114 if (result != 0)
2115 {
2116 if (errno_set)
2117 expand_errno_check (exp, result);
2118
2119 /* Output the entire sequence. */
2120 insns = get_insns ();
2121 end_sequence ();
2122 emit_insn (insns);
2123 return result;
2124 }
2125
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call the library function
2128 with the stabilized argument list. */
2129 end_sequence ();
2130 }
2131
2132 return expand_call (exp, target, target == const0_rtx);
2133 }
2134
2135 /* Expand a call to the builtin binary math functions (pow and atan2).
2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2140 operands. */
2141
2142 static rtx
2143 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2144 {
2145 optab builtin_optab;
2146 rtx op0, op1, result;
2147 rtx_insn *insns;
2148 int op1_type = REAL_TYPE;
2149 tree fndecl = get_callee_fndecl (exp);
2150 tree arg0, arg1;
2151 enum machine_mode mode;
2152 bool errno_set = true;
2153
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SCALBN):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN):
2158 CASE_FLT_FN (BUILT_IN_LDEXP):
2159 op1_type = INTEGER_TYPE; /* FALLTHRU */
2160 default:
2161 break;
2162 }
2163
2164 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2165 return NULL_RTX;
2166
2167 arg0 = CALL_EXPR_ARG (exp, 0);
2168 arg1 = CALL_EXPR_ARG (exp, 1);
2169
2170 switch (DECL_FUNCTION_CODE (fndecl))
2171 {
2172 CASE_FLT_FN (BUILT_IN_POW):
2173 builtin_optab = pow_optab; break;
2174 CASE_FLT_FN (BUILT_IN_ATAN2):
2175 builtin_optab = atan2_optab; break;
2176 CASE_FLT_FN (BUILT_IN_SCALB):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 builtin_optab = scalb_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2183 return 0;
2184 /* Fall through... */
2185 CASE_FLT_FN (BUILT_IN_LDEXP):
2186 builtin_optab = ldexp_optab; break;
2187 CASE_FLT_FN (BUILT_IN_FMOD):
2188 builtin_optab = fmod_optab; break;
2189 CASE_FLT_FN (BUILT_IN_REMAINDER):
2190 CASE_FLT_FN (BUILT_IN_DREM):
2191 builtin_optab = remainder_optab; break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195
2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
2198
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 return NULL_RTX;
2202
2203 result = gen_reg_rtx (mode);
2204
2205 if (! flag_errno_math || ! HONOR_NANS (mode))
2206 errno_set = false;
2207
2208 if (errno_set && optimize_insn_for_size_p ())
2209 return 0;
2210
2211 /* Always stabilize the argument list. */
2212 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2213 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2214
2215 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2216 op1 = expand_normal (arg1);
2217
2218 start_sequence ();
2219
2220 /* Compute into RESULT.
2221 Set RESULT to wherever the result comes back. */
2222 result = expand_binop (mode, builtin_optab, op0, op1,
2223 result, 0, OPTAB_DIRECT);
2224
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call the library function
2227 with the stabilized argument list. */
2228 if (result == 0)
2229 {
2230 end_sequence ();
2231 return expand_call (exp, target, target == const0_rtx);
2232 }
2233
2234 if (errno_set)
2235 expand_errno_check (exp, result);
2236
2237 /* Output the entire sequence. */
2238 insns = get_insns ();
2239 end_sequence ();
2240 emit_insn (insns);
2241
2242 return result;
2243 }
2244
2245 /* Expand a call to the builtin ternary math functions (fma).
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function; if convenient, the result should be placed in TARGET.
2249 SUBTARGET may be used as the target for computing one of EXP's
2250 operands. */
2251
2252 static rtx
2253 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254 {
2255 optab builtin_optab;
2256 rtx op0, op1, op2, result;
2257 rtx_insn *insns;
2258 tree fndecl = get_callee_fndecl (exp);
2259 tree arg0, arg1, arg2;
2260 enum machine_mode mode;
2261
2262 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2263 return NULL_RTX;
2264
2265 arg0 = CALL_EXPR_ARG (exp, 0);
2266 arg1 = CALL_EXPR_ARG (exp, 1);
2267 arg2 = CALL_EXPR_ARG (exp, 2);
2268
2269 switch (DECL_FUNCTION_CODE (fndecl))
2270 {
2271 CASE_FLT_FN (BUILT_IN_FMA):
2272 builtin_optab = fma_optab; break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* Make a suitable register to place result in. */
2278 mode = TYPE_MODE (TREE_TYPE (exp));
2279
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2282 return NULL_RTX;
2283
2284 result = gen_reg_rtx (mode);
2285
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2288 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2289 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2290
2291 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2292 op1 = expand_normal (arg1);
2293 op2 = expand_normal (arg2);
2294
2295 start_sequence ();
2296
2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2300 result, 0);
2301
2302 /* If we were unable to expand via the builtin, stop the sequence
2303 (without outputting the insns) and call the library function
2304 with the stabilized argument list. */
2305 if (result == 0)
2306 {
2307 end_sequence ();
2308 return expand_call (exp, target, target == const0_rtx);
2309 }
2310
2311 /* Output the entire sequence. */
2312 insns = get_insns ();
2313 end_sequence ();
2314 emit_insn (insns);
2315
2316 return result;
2317 }
2318
2319 /* Expand a call to the builtin sin and cos math functions.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2324 operands. */
2325
2326 static rtx
2327 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2328 {
2329 optab builtin_optab;
2330 rtx op0;
2331 rtx_insn *insns;
2332 tree fndecl = get_callee_fndecl (exp);
2333 enum machine_mode mode;
2334 tree arg;
2335
2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
2338
2339 arg = CALL_EXPR_ARG (exp, 0);
2340
2341 switch (DECL_FUNCTION_CODE (fndecl))
2342 {
2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
2345 builtin_optab = sincos_optab; break;
2346 default:
2347 gcc_unreachable ();
2348 }
2349
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2352
2353 /* Check if the sincos insn is available; otherwise fall back
2354 to the sin or cos insn. */
2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 builtin_optab = sin_optab; break;
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = cos_optab; break;
2362 default:
2363 gcc_unreachable ();
2364 }
2365
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2368 {
2369 rtx result = gen_reg_rtx (mode);
2370
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373 side-effects more than once. */
2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2375
2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2377
2378 start_sequence ();
2379
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab == sincos_optab)
2383 {
2384 int ok;
2385
2386 switch (DECL_FUNCTION_CODE (fndecl))
2387 {
2388 CASE_FLT_FN (BUILT_IN_SIN):
2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2390 break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2393 break;
2394 default:
2395 gcc_unreachable ();
2396 }
2397 gcc_assert (ok);
2398 }
2399 else
2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
2401
2402 if (result != 0)
2403 {
2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
2408 return result;
2409 }
2410
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2415 }
2416
2417 return expand_call (exp, target, target == const0_rtx);
2418 }
2419
2420 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available, return CODE_FOR_nothing. */
2423
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg, tree fndecl)
2426 {
2427 bool errno_set = false;
2428 optab builtin_optab = unknown_optab;
2429 enum machine_mode mode;
2430
2431 switch (DECL_FUNCTION_CODE (fndecl))
2432 {
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
2437 case BUILT_IN_ISNORMAL:
2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
2446 /* These builtins have no optabs (yet). */
2447 break;
2448 default:
2449 gcc_unreachable ();
2450 }
2451
2452 /* There's no easy way to detect the case where we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
2454 return CODE_FOR_nothing;
2455
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (builtin_optab)
2460 return optab_handler (builtin_optab, mode);
2461 return CODE_FOR_nothing;
2462 }
2463
2464 /* Expand a call to one of the builtin math functions that take a
2465 floating point argument and produce an integer result (ilogb, isinf,
2466 isnan, etc.).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2469 function; if convenient, the result should be placed in TARGET. */
2470
2471 static rtx
2472 expand_builtin_interclass_mathfn (tree exp, rtx target)
2473 {
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
2477 enum machine_mode mode;
2478 tree arg;
2479
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2486
2487 if (icode != CODE_FOR_nothing)
2488 {
2489 struct expand_operand ops[1];
2490 rtx_insn *last = get_last_insn ();
2491 tree orig_arg = arg;
2492
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495 side-effects more than once. */
2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2497
2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2499
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2502
2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2507
2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
2510 }
2511
2512 return NULL_RTX;
2513 }
2514
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2519
2520 static rtx
2521 expand_builtin_sincos (tree exp)
2522 {
2523 rtx op0, op1, op2, target1, target2;
2524 enum machine_mode mode;
2525 tree arg, sinp, cosp;
2526 int result;
2527 location_t loc = EXPR_LOCATION (exp);
2528 tree alias_type, alias_off;
2529
2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2533
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
2537
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2540
2541 /* Check if the sincos insn is available; otherwise emit the call. */
2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2543 return NULL_RTX;
2544
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2547
2548 op0 = expand_normal (arg);
2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
2555
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2560
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2565
2566 return const0_rtx;
2567 }
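
/* For illustration: when a sincos optab entry exists for MODE, a
   source-level call

     sincos (x, &s, &c);

   is expanded into a single instruction computing both values, whose
   results are then stored through the two pointer arguments.  */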
2568
2569 /* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
2571 the result should be placed in TARGET. */
2572
2573 static rtx
2574 expand_builtin_cexpi (tree exp, rtx target)
2575 {
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg, type;
2578 enum machine_mode mode;
2579 rtx op0, op1, op2;
2580 location_t loc = EXPR_LOCATION (exp);
2581
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 return NULL_RTX;
2584
2585 arg = CALL_EXPR_ARG (exp, 0);
2586 type = TREE_TYPE (arg);
2587 mode = TYPE_MODE (TREE_TYPE (arg));
2588
2589 /* Try expanding via a sincos optab; fall back to emitting a libcall
2590 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2591 is only generated from sincos or cexp, or when we have either of them. */
2592 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2593 {
2594 op1 = gen_reg_rtx (mode);
2595 op2 = gen_reg_rtx (mode);
2596
2597 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2598
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 }
2602 else if (targetm.libc_has_function (function_sincos))
2603 {
2604 tree call, fn = NULL_TREE;
2605 tree top1, top2;
2606 rtx op1a, op2a;
2607
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2614 else
2615 gcc_unreachable ();
2616
2617 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op1a = copy_addr_to_reg (XEXP (op1, 0));
2620 op2a = copy_addr_to_reg (XEXP (op2, 0));
2621 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2622 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623
2624 /* Make sure not to fold the sincos call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2627 call, 3, arg, top1, top2));
2628 }
2629 else
2630 {
2631 tree call, fn = NULL_TREE, narg;
2632 tree ctype = build_complex_type (type);
2633
2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2635 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2640 else
2641 gcc_unreachable ();
2642
2643 /* If we don't have a decl for cexp, create one. This is the
2644 friendliest fallback if the user calls __builtin_cexpi
2645 on a target without full C99 function support. */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649 const char *name = NULL;
2650
2651 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2652 name = "cexpf";
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2654 name = "cexp";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2656 name = "cexpl";
2657
2658 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2659 fn = build_fn_decl (name, fntype);
2660 }
2661
2662 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2663 build_real (type, dconst0), arg);
2664
2665 /* Make sure not to fold the cexp call again. */
2666 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2667 return expand_expr (build_call_nary (ctype, call, 1, narg),
2668 target, VOIDmode, EXPAND_NORMAL);
2669 }
2670
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2673 make_tree (TREE_TYPE (arg), op2),
2674 make_tree (TREE_TYPE (arg), op1)),
2675 target, VOIDmode, EXPAND_NORMAL);
2676 }
2677
2678 /* Conveniently construct a function call expression. FNDECL names the
2679 function to be called, N is the number of arguments, and the "..."
2680 parameters are the argument expressions. Unlike build_call_expr
2681 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2682
2683 static tree
2684 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685 {
2686 va_list ap;
2687 tree fntype = TREE_TYPE (fndecl);
2688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689
2690 va_start (ap, n);
2691 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2692 va_end (ap);
2693 SET_EXPR_LOCATION (fn, loc);
2694 return fn;
2695 }
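
/* A typical use, as seen later in this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                  fallback_fndecl, 1, arg);

   which builds an unfolded CALL_EXPR calling FALLBACK_FNDECL with the
   single argument ARG.  */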
2696
2697 /* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
2700 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
2702 if convenient, the result should be placed in TARGET. */
2703
2704 static rtx
2705 expand_builtin_int_roundingfn (tree exp, rtx target)
2706 {
2707 convert_optab builtin_optab;
2708 rtx op0, tmp;
2709 rtx_insn *insns;
2710 tree fndecl = get_callee_fndecl (exp);
2711 enum built_in_function fallback_fn;
2712 tree fallback_fndecl;
2713 enum machine_mode mode;
2714 tree arg;
2715
2716 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2717 gcc_unreachable ();
2718
2719 arg = CALL_EXPR_ARG (exp, 0);
2720
2721 switch (DECL_FUNCTION_CODE (fndecl))
2722 {
2723 CASE_FLT_FN (BUILT_IN_ICEIL):
2724 CASE_FLT_FN (BUILT_IN_LCEIL):
2725 CASE_FLT_FN (BUILT_IN_LLCEIL):
2726 builtin_optab = lceil_optab;
2727 fallback_fn = BUILT_IN_CEIL;
2728 break;
2729
2730 CASE_FLT_FN (BUILT_IN_IFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2733 builtin_optab = lfloor_optab;
2734 fallback_fn = BUILT_IN_FLOOR;
2735 break;
2736
2737 default:
2738 gcc_unreachable ();
2739 }
2740
2741 /* Make a suitable register to place result in. */
2742 mode = TYPE_MODE (TREE_TYPE (exp));
2743
2744 target = gen_reg_rtx (mode);
2745
2746 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2747 need to expand the argument again. This way, we will not perform
2748 side-effects more than once. */
2749 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2750
2751 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2752
2753 start_sequence ();
2754
2755 /* Compute into TARGET. */
2756 if (expand_sfix_optab (target, op0, builtin_optab))
2757 {
2758 /* Output the entire sequence. */
2759 insns = get_insns ();
2760 end_sequence ();
2761 emit_insn (insns);
2762 return target;
2763 }
2764
2765 /* If we were unable to expand via the builtin, stop the sequence
2766 (without outputting the insns). */
2767 end_sequence ();
2768
2769 /* Fall back to floating point rounding optab. */
2770 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2771
2772 /* For non-C99 targets we may end up without a fallback fndecl here
2773 if the user called __builtin_lfloor directly. In this case emit
2774 a call to the floor/ceil variants nevertheless. This should result
2775 in the best user experience on targets without full C99 support. */
2776 if (fallback_fndecl == NULL_TREE)
2777 {
2778 tree fntype;
2779 const char *name = NULL;
2780
2781 switch (DECL_FUNCTION_CODE (fndecl))
2782 {
2783 case BUILT_IN_ICEIL:
2784 case BUILT_IN_LCEIL:
2785 case BUILT_IN_LLCEIL:
2786 name = "ceil";
2787 break;
2788 case BUILT_IN_ICEILF:
2789 case BUILT_IN_LCEILF:
2790 case BUILT_IN_LLCEILF:
2791 name = "ceilf";
2792 break;
2793 case BUILT_IN_ICEILL:
2794 case BUILT_IN_LCEILL:
2795 case BUILT_IN_LLCEILL:
2796 name = "ceill";
2797 break;
2798 case BUILT_IN_IFLOOR:
2799 case BUILT_IN_LFLOOR:
2800 case BUILT_IN_LLFLOOR:
2801 name = "floor";
2802 break;
2803 case BUILT_IN_IFLOORF:
2804 case BUILT_IN_LFLOORF:
2805 case BUILT_IN_LLFLOORF:
2806 name = "floorf";
2807 break;
2808 case BUILT_IN_IFLOORL:
2809 case BUILT_IN_LFLOORL:
2810 case BUILT_IN_LLFLOORL:
2811 name = "floorl";
2812 break;
2813 default:
2814 gcc_unreachable ();
2815 }
2816
2817 fntype = build_function_type_list (TREE_TYPE (arg),
2818 TREE_TYPE (arg), NULL_TREE);
2819 fallback_fndecl = build_fn_decl (name, fntype);
2820 }
2821
2822 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2823
2824 tmp = expand_normal (exp);
2825 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2826
2827 /* Truncate the result of floating point optab to integer
2828 via expand_fix (). */
2829 target = gen_reg_rtx (mode);
2830 expand_fix (target, tmp, 0);
2831
2832 return target;
2833 }
2834
2835 /* Expand a call to one of the builtin math functions doing integer
2836 conversion (lrint).
2837 Return 0 if a normal call should be emitted rather than expanding the
2838 function in-line. EXP is the expression that is a call to the builtin
2839 function; if convenient, the result should be placed in TARGET. */
2840
2841 static rtx
2842 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2843 {
2844 convert_optab builtin_optab;
2845 rtx op0;
2846 rtx_insn *insns;
2847 tree fndecl = get_callee_fndecl (exp);
2848 tree arg;
2849 enum machine_mode mode;
2850 enum built_in_function fallback_fn = BUILT_IN_NONE;
2851
2852 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2853 gcc_unreachable ();
2854
2855 arg = CALL_EXPR_ARG (exp, 0);
2856
2857 switch (DECL_FUNCTION_CODE (fndecl))
2858 {
2859 CASE_FLT_FN (BUILT_IN_IRINT):
2860 fallback_fn = BUILT_IN_LRINT;
2861 /* FALLTHRU */
2862 CASE_FLT_FN (BUILT_IN_LRINT):
2863 CASE_FLT_FN (BUILT_IN_LLRINT):
2864 builtin_optab = lrint_optab;
2865 break;
2866
2867 CASE_FLT_FN (BUILT_IN_IROUND):
2868 fallback_fn = BUILT_IN_LROUND;
2869 /* FALLTHRU */
2870 CASE_FLT_FN (BUILT_IN_LROUND):
2871 CASE_FLT_FN (BUILT_IN_LLROUND):
2872 builtin_optab = lround_optab;
2873 break;
2874
2875 default:
2876 gcc_unreachable ();
2877 }
2878
2879 /* There's no easy way to detect the case where we need to set EDOM. */
2880 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2881 return NULL_RTX;
2882
2883 /* Make a suitable register to place result in. */
2884 mode = TYPE_MODE (TREE_TYPE (exp));
2885
2886 /* Expand in-line only when we don't have to worry about setting errno. */
2887 if (!flag_errno_math)
2888 {
2889 rtx result = gen_reg_rtx (mode);
2890
2891 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2892 need to expand the argument again. This way, we will not perform
2893 side-effects more than once. */
2894 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2895
2896 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2897
2898 start_sequence ();
2899
2900 if (expand_sfix_optab (result, op0, builtin_optab))
2901 {
2902 /* Output the entire sequence. */
2903 insns = get_insns ();
2904 end_sequence ();
2905 emit_insn (insns);
2906 return result;
2907 }
2908
2909 /* If we were unable to expand via the builtin, stop the sequence
2910 (without outputting the insns) and call the library function
2911 with the stabilized argument list. */
2912 end_sequence ();
2913 }
2914
2915 if (fallback_fn != BUILT_IN_NONE)
2916 {
2917 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921 C99 functions. This should result in the best user experience on
2922 targets without full C99 support. */
2923 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2924 fallback_fn, 0);
2925
2926 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2927 fallback_fndecl, 1, arg);
2928
2929 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2930 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2931 return convert_to_mode (mode, target, 0);
2932 }
2933
2934 return expand_call (exp, target, target == const0_rtx);
2935 }
2936
2937 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2941
2942 static rtx
2943 expand_builtin_powi (tree exp, rtx target)
2944 {
2945 tree arg0, arg1;
2946 rtx op0, op1;
2947 enum machine_mode mode;
2948 enum machine_mode mode2;
2949
2950 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 return NULL_RTX;
2952
2953 arg0 = CALL_EXPR_ARG (exp, 0);
2954 arg1 = CALL_EXPR_ARG (exp, 1);
2955 mode = TYPE_MODE (TREE_TYPE (exp));
2956
2957 /* Emit a libcall to libgcc. */
2958
2959 /* Mode of the 2nd argument must match that of an int. */
2960 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961
2962 if (target == NULL_RTX)
2963 target = gen_reg_rtx (mode);
2964
2965 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2966 if (GET_MODE (op0) != mode)
2967 op0 = convert_to_mode (mode, op0, 0);
2968 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2969 if (GET_MODE (op1) != mode2)
2970 op1 = convert_to_mode (mode2, op1, 0);
2971
2972 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2973 target, LCT_CONST, mode, 2,
2974 op0, mode, op1, mode2);
2975
2976 return target;
2977 }
2978
2979 /* Expand expression EXP, which is a call to the strlen builtin. Return
2980 NULL_RTX if we failed, in which case the caller should emit a normal
2981 call; otherwise try to get the result in TARGET, if convenient. */
2982
2983 static rtx
2984 expand_builtin_strlen (tree exp, rtx target,
2985 enum machine_mode target_mode)
2986 {
2987 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2988 return NULL_RTX;
2989 else
2990 {
2991 struct expand_operand ops[4];
2992 rtx pat;
2993 tree len;
2994 tree src = CALL_EXPR_ARG (exp, 0);
2995 rtx src_reg;
2996 rtx_insn *before_strlen;
2997 enum machine_mode insn_mode = target_mode;
2998 enum insn_code icode = CODE_FOR_nothing;
2999 unsigned int align;
3000
3001 /* If the length can be computed at compile-time, return it. */
3002 len = c_strlen (src, 0);
3003 if (len)
3004 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3005
3006 /* If the length can be computed at compile-time and is a constant
3007 integer, but there are side-effects in src, evaluate
3008 src for side-effects, then return len.
3009 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3010 can be optimized into: i++; x = 3; */
3011 len = c_strlen (src, 1);
3012 if (len && TREE_CODE (len) == INTEGER_CST)
3013 {
3014 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 }
3017
3018 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3019
3020 /* If SRC is not a pointer type, don't do this operation inline. */
3021 if (align == 0)
3022 return NULL_RTX;
3023
3024 /* Bail out if we can't compute strlen in the right mode. */
3025 while (insn_mode != VOIDmode)
3026 {
3027 icode = optab_handler (strlen_optab, insn_mode);
3028 if (icode != CODE_FOR_nothing)
3029 break;
3030
3031 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3032 }
3033 if (insn_mode == VOIDmode)
3034 return NULL_RTX;
3035
3036 /* Make a place to hold the source address. We will not expand
3037 the actual source until we are sure that the expansion will
3038 not fail -- there are trees that cannot be expanded twice. */
3039 src_reg = gen_reg_rtx (Pmode);
3040
3041 /* Mark the beginning of the strlen sequence so we can emit the
3042 source operand later. */
3043 before_strlen = get_last_insn ();
3044
3045 create_output_operand (&ops[0], target, insn_mode);
3046 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3047 create_integer_operand (&ops[2], 0);
3048 create_integer_operand (&ops[3], align);
3049 if (!maybe_expand_insn (icode, 4, ops))
3050 return NULL_RTX;
3051
3052 /* Now that we are assured of success, expand the source. */
3053 start_sequence ();
3054 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3055 if (pat != src_reg)
3056 {
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (pat) != Pmode)
3059 pat = convert_to_mode (Pmode, pat,
3060 POINTERS_EXTEND_UNSIGNED);
3061 #endif
3062 emit_move_insn (src_reg, pat);
3063 }
3064 pat = get_insns ();
3065 end_sequence ();
3066
3067 if (before_strlen)
3068 emit_insn_after (pat, before_strlen);
3069 else
3070 emit_insn_before (pat, get_insns ());
3071
3072 /* Return the value in the proper mode for this function. */
3073 if (GET_MODE (ops[0].value) == target_mode)
3074 target = ops[0].value;
3075 else if (target != 0)
3076 convert_move (target, ops[0].value, 0);
3077 else
3078 target = convert_to_mode (target_mode, ops[0].value, 0);
3079
3080 return target;
3081 }
3082 }
3083
3084 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3085 bytes from the constant string DATA + OFFSET and return them as a
3086 target constant. */
3087
3088 static rtx
3089 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3090 enum machine_mode mode)
3091 {
3092 const char *str = (const char *) data;
3093
3094 gcc_assert (offset >= 0
3095 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3096 <= strlen (str) + 1));
3097
3098 return c_readstr (str + offset, mode);
3099 }
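
/* E.g. when the 8 bytes of "abcdefg" (including the trailing NUL) are
   stored by SImode pieces, this callback may be invoked with OFFSET 0
   and then 4, returning the constants c_readstr builds from "abcd"
   and "efg\0" respectively.  */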
3100
3101 /* LEN specifies the length of the block for a memcpy/memset operation.
3102 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3103 In some cases we can make a very likely guess about the maximum size,
3104 which we then store into PROBABLE_MAX_SIZE. */
3105
3106 static void
3107 determine_block_size (tree len, rtx len_rtx,
3108 unsigned HOST_WIDE_INT *min_size,
3109 unsigned HOST_WIDE_INT *max_size,
3110 unsigned HOST_WIDE_INT *probable_max_size)
3111 {
3112 if (CONST_INT_P (len_rtx))
3113 {
3114 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3115 return;
3116 }
3117 else
3118 {
3119 wide_int min, max;
3120 enum value_range_type range_type = VR_UNDEFINED;
3121
3122 /* Determine bounds from the type. */
3123 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3124 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3125 else
3126 *min_size = 0;
3127 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3128 *probable_max_size = *max_size
3129 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3130 else
3131 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3132
3133 if (TREE_CODE (len) == SSA_NAME)
3134 range_type = get_range_info (len, &min, &max);
3135 if (range_type == VR_RANGE)
3136 {
3137 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3138 *min_size = min.to_uhwi ();
3139 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3140 *probable_max_size = *max_size = max.to_uhwi ();
3141 }
3142 else if (range_type == VR_ANTI_RANGE)
3143 {
3144 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3145 if (min == 0)
3146 {
3147 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3148 *min_size = max.to_uhwi () + 1;
3149 }
3150 /* Code like
3151
3152 int n;
3153 if (n < 100)
3154 memcpy (a, b, n)
3155
3156 produces an anti range allowing negative values of N. We can
3157 still use the information and make a guess that N is not negative.
3158 */
3159 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3160 *probable_max_size = min.to_uhwi () - 1;
3161 }
3162 }
3163 gcc_checking_assert (*max_size <=
3164 (unsigned HOST_WIDE_INT)
3165 GET_MODE_MASK (GET_MODE (len_rtx)));
3166 }
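
/* For instance, given

     unsigned int n = ...;
     if (n < 100)
       memcpy (a, b, n);

   value range propagation records the range [0, 99] for the SSA_NAME
   N, and this function sets *MIN_SIZE to 0 and both *MAX_SIZE and
   *PROBABLE_MAX_SIZE to 99.  */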
3167
3168 /* Expand a call EXP to the memcpy builtin.
3169 Return NULL_RTX if we failed, in which case the caller should emit a normal call;
3170 otherwise try to get the result in TARGET, if convenient (and in
3171 mode MODE if that's convenient). */
3172
3173 static rtx
3174 expand_builtin_memcpy (tree exp, rtx target)
3175 {
3176 if (!validate_arglist (exp,
3177 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3178 return NULL_RTX;
3179 else
3180 {
3181 tree dest = CALL_EXPR_ARG (exp, 0);
3182 tree src = CALL_EXPR_ARG (exp, 1);
3183 tree len = CALL_EXPR_ARG (exp, 2);
3184 const char *src_str;
3185 unsigned int src_align = get_pointer_alignment (src);
3186 unsigned int dest_align = get_pointer_alignment (dest);
3187 rtx dest_mem, src_mem, dest_addr, len_rtx;
3188 HOST_WIDE_INT expected_size = -1;
3189 unsigned int expected_align = 0;
3190 unsigned HOST_WIDE_INT min_size;
3191 unsigned HOST_WIDE_INT max_size;
3192 unsigned HOST_WIDE_INT probable_max_size;
3193
3194 /* If DEST is not a pointer type, call the normal function. */
3195 if (dest_align == 0)
3196 return NULL_RTX;
3197
3198 /* Likewise, if SRC is not a pointer type, don't do this
3199 operation in-line. */
3200 if (src_align == 0)
3201 return NULL_RTX;
3202
3203 if (currently_expanding_gimple_stmt)
3204 stringop_block_profile (currently_expanding_gimple_stmt,
3205 &expected_align, &expected_size);
3206
3207 if (expected_align < dest_align)
3208 expected_align = dest_align;
3209 dest_mem = get_memory_rtx (dest, len);
3210 set_mem_align (dest_mem, dest_align);
3211 len_rtx = expand_normal (len);
3212 determine_block_size (len, len_rtx, &min_size, &max_size,
3213 &probable_max_size);
3214 src_str = c_getstr (src);
3215
3216 /* If SRC is a string constant and block move would be done
3217 by pieces, we can avoid loading the string from memory
3218 and instead store only the computed constants. */
3219 if (src_str
3220 && CONST_INT_P (len_rtx)
3221 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3222 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3223 CONST_CAST (char *, src_str),
3224 dest_align, false))
3225 {
3226 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3227 builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false, 0);
3230 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3231 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3232 return dest_mem;
3233 }
3234
3235 src_mem = get_memory_rtx (src, len);
3236 set_mem_align (src_mem, src_align);
3237
3238 /* Copy word part most expediently. */
3239 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3240 CALL_EXPR_TAILCALL (exp)
3241 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3242 expected_align, expected_size,
3243 min_size, max_size, probable_max_size);
3244
3245 if (dest_addr == 0)
3246 {
3247 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3248 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3249 }
3250 return dest_addr;
3251 }
3252 }
3253
3254 /* Expand a call EXP to the mempcpy builtin.
3255 Return NULL_RTX if we failed; the caller should emit a normal call,
3256 otherwise try to get the result in TARGET, if convenient (and in
3257 mode MODE if that's convenient). If ENDP is 0 return the
3258 destination pointer, if ENDP is 1 return the end pointer ala
3259 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3260 stpcpy. */
3261
3262 static rtx
3263 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3264 {
3265 if (!validate_arglist (exp,
3266 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3267 return NULL_RTX;
3268 else
3269 {
3270 tree dest = CALL_EXPR_ARG (exp, 0);
3271 tree src = CALL_EXPR_ARG (exp, 1);
3272 tree len = CALL_EXPR_ARG (exp, 2);
3273 return expand_builtin_mempcpy_args (dest, src, len,
3274 target, mode, /*endp=*/ 1);
3275 }
3276 }
3277
3278 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3279 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3280 so that this can also be called without constructing an actual CALL_EXPR.
3281 The other arguments and return value are the same as for
3282 expand_builtin_mempcpy. */
3283
3284 static rtx
3285 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3286 rtx target, enum machine_mode mode, int endp)
3287 {
3288 /* If the return value is ignored, transform mempcpy into memcpy. */
3289 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3290 {
3291 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3292 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3293 dest, src, len);
3294 return expand_expr (result, target, mode, EXPAND_NORMAL);
3295 }
3296 else
3297 {
3298 const char *src_str;
3299 unsigned int src_align = get_pointer_alignment (src);
3300 unsigned int dest_align = get_pointer_alignment (dest);
3301 rtx dest_mem, src_mem, len_rtx;
3302
3303 /* If either SRC or DEST is not a pointer type, don't do this
3304 operation in-line. */
3305 if (dest_align == 0 || src_align == 0)
3306 return NULL_RTX;
3307
3308 /* If LEN is not constant, call the normal function. */
3309 if (! tree_fits_uhwi_p (len))
3310 return NULL_RTX;
3311
3312 len_rtx = expand_normal (len);
3313 src_str = c_getstr (src);
3314
3315 /* If SRC is a string constant and block move would be done
3316 by pieces, we can avoid loading the string from memory
3317 and only store the computed constants. */
3318 if (src_str
3319 && CONST_INT_P (len_rtx)
3320 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3321 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3322 CONST_CAST (char *, src_str),
3323 dest_align, false))
3324 {
3325 dest_mem = get_memory_rtx (dest, len);
3326 set_mem_align (dest_mem, dest_align);
3327 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3328 builtin_memcpy_read_str,
3329 CONST_CAST (char *, src_str),
3330 dest_align, false, endp);
3331 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3332 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3333 return dest_mem;
3334 }
3335
3336 if (CONST_INT_P (len_rtx)
3337 && can_move_by_pieces (INTVAL (len_rtx),
3338 MIN (dest_align, src_align)))
3339 {
3340 dest_mem = get_memory_rtx (dest, len);
3341 set_mem_align (dest_mem, dest_align);
3342 src_mem = get_memory_rtx (src, len);
3343 set_mem_align (src_mem, src_align);
3344 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3345 MIN (dest_align, src_align), endp);
3346 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3347 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3348 return dest_mem;
3349 }
3350
3351 return NULL_RTX;
3352 }
3353 }
3354
3355 #ifndef HAVE_movstr
3356 # define HAVE_movstr 0
3357 # define CODE_FOR_movstr CODE_FOR_nothing
3358 #endif
3359
3360 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3361 we failed; the caller should emit a normal call, otherwise try to
3362 get the result in TARGET, if convenient. If ENDP is 0 return the
3363 destination pointer; if ENDP is 1 return the end pointer, a la
3364 mempcpy; and if ENDP is 2 return the end pointer minus one, a la
3365 stpcpy. */
3366
3367 static rtx
3368 expand_movstr (tree dest, tree src, rtx target, int endp)
3369 {
3370 struct expand_operand ops[3];
3371 rtx dest_mem;
3372 rtx src_mem;
3373
3374 if (!HAVE_movstr)
3375 return NULL_RTX;
3376
3377 dest_mem = get_memory_rtx (dest, NULL);
3378 src_mem = get_memory_rtx (src, NULL);
3379 if (!endp)
3380 {
3381 target = force_reg (Pmode, XEXP (dest_mem, 0));
3382 dest_mem = replace_equiv_address (dest_mem, target);
3383 }
3384
3385 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3386 create_fixed_operand (&ops[1], dest_mem);
3387 create_fixed_operand (&ops[2], src_mem);
3388 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3389 return NULL_RTX;
3390
3391 if (endp && target != const0_rtx)
3392 {
3393 target = ops[0].value;
3394 /* movstr is supposed to set end to the address of the NUL
3395 terminator. If the caller requested a mempcpy-like return value,
3396 adjust it. */
3397 if (endp == 1)
3398 {
3399 rtx tem = plus_constant (GET_MODE (target),
3400 gen_lowpart (GET_MODE (target), target), 1);
3401 emit_move_insn (target, force_operand (tem, NULL_RTX));
3402 }
3403 }
3404 return target;
3405 }
3406
3407 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3408 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3409 try to get the result in TARGET, if convenient (and in mode MODE if that's
3410 convenient). */
3411
3412 static rtx
3413 expand_builtin_strcpy (tree exp, rtx target)
3414 {
3415 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3416 {
3417 tree dest = CALL_EXPR_ARG (exp, 0);
3418 tree src = CALL_EXPR_ARG (exp, 1);
3419 return expand_builtin_strcpy_args (dest, src, target);
3420 }
3421 return NULL_RTX;
3422 }
3423
3424 /* Helper function to do the actual work for expand_builtin_strcpy. The
3425 arguments to the builtin_strcpy call DEST and SRC are broken out
3426 so that this can also be called without constructing an actual CALL_EXPR.
3427 The other arguments and return value are the same as for
3428 expand_builtin_strcpy. */
3429
3430 static rtx
3431 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3432 {
3433 return expand_movstr (dest, src, target, /*endp=*/0);
3434 }
3435
3436 /* Expand a call EXP to the stpcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). */
3440
3441 static rtx
3442 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3443 {
3444 tree dst, src;
3445 location_t loc = EXPR_LOCATION (exp);
3446
3447 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3448 return NULL_RTX;
3449
3450 dst = CALL_EXPR_ARG (exp, 0);
3451 src = CALL_EXPR_ARG (exp, 1);
3452
3453 /* If return value is ignored, transform stpcpy into strcpy. */
3454 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3455 {
3456 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3457 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3458 return expand_expr (result, target, mode, EXPAND_NORMAL);
3459 }
3460 else
3461 {
3462 tree len, lenp1;
3463 rtx ret;
3464
3465 /* Ensure we get an actual string whose length can be evaluated at
3466 compile-time, not an expression containing a string. This is
3467 because the latter will potentially produce pessimized code
3468 when used to produce the return value. */
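/* For example, given stpcpy (dst, "abc"), LEN is 3, so the call is
   expanded as a mempcpy of LEN + 1 == 4 bytes with ENDP == 2,
   yielding dst + 3, the address of the NUL terminator. */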
3469 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3470 return expand_movstr (dst, src, target, /*endp=*/2);
3471
3472 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3473 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3474 target, mode, /*endp=*/2);
3475
3476 if (ret)
3477 return ret;
3478
3479 if (TREE_CODE (len) == INTEGER_CST)
3480 {
3481 rtx len_rtx = expand_normal (len);
3482
3483 if (CONST_INT_P (len_rtx))
3484 {
3485 ret = expand_builtin_strcpy_args (dst, src, target);
3486
3487 if (ret)
3488 {
3489 if (! target)
3490 {
3491 if (mode != VOIDmode)
3492 target = gen_reg_rtx (mode);
3493 else
3494 target = gen_reg_rtx (GET_MODE (ret));
3495 }
3496 if (GET_MODE (target) != GET_MODE (ret))
3497 ret = gen_lowpart (GET_MODE (target), ret);
3498
3499 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3500 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3501 gcc_assert (ret);
3502
3503 return target;
3504 }
3505 }
3506 }
3507
3508 return expand_movstr (dst, src, target, /*endp=*/2);
3509 }
3510 }
3511
3512 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3513 bytes from constant string DATA + OFFSET and return it as target
3514 constant. */
3515
3516 rtx
3517 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3518 enum machine_mode mode)
3519 {
3520 const char *str = (const char *) data;
3521
3522 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3523 return const0_rtx;
3524
3525 return c_readstr (str + offset, mode);
3526 }
3527
3528 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3529 NULL_RTX if we failed; the caller should emit a normal call. */
3530
3531 static rtx
3532 expand_builtin_strncpy (tree exp, rtx target)
3533 {
3534 location_t loc = EXPR_LOCATION (exp);
3535
3536 if (validate_arglist (exp,
3537 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3538 {
3539 tree dest = CALL_EXPR_ARG (exp, 0);
3540 tree src = CALL_EXPR_ARG (exp, 1);
3541 tree len = CALL_EXPR_ARG (exp, 2);
3542 tree slen = c_strlen (src, 1);
3543
3544 /* We must be passed a constant LEN and a SRC whose strlen is known. */
3545 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3546 return NULL_RTX;
3547
3548 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3549
3550 /* We're required to pad with trailing zeros if the requested
3551 len is greater than strlen(s2)+1. In that case try to
3552 use store_by_pieces; if that fails, punt. */
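/* For example, strncpy (dst, "ab", 5) must store 'a', 'b' and then
   three NUL bytes. With a constant source and length, the whole
   5-byte pattern can be emitted by store_by_pieces, with
   builtin_strncpy_read_str below supplying zeros past the end of
   the string. */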
3553 if (tree_int_cst_lt (slen, len))
3554 {
3555 unsigned int dest_align = get_pointer_alignment (dest);
3556 const char *p = c_getstr (src);
3557 rtx dest_mem;
3558
3559 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3560 || !can_store_by_pieces (tree_to_uhwi (len),
3561 builtin_strncpy_read_str,
3562 CONST_CAST (char *, p),
3563 dest_align, false))
3564 return NULL_RTX;
3565
3566 dest_mem = get_memory_rtx (dest, len);
3567 store_by_pieces (dest_mem, tree_to_uhwi (len),
3568 builtin_strncpy_read_str,
3569 CONST_CAST (char *, p), dest_align, false, 0);
3570 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3571 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3572 return dest_mem;
3573 }
3574 }
3575 return NULL_RTX;
3576 }
3577
3578 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3579 bytes from constant string DATA + OFFSET and return it as target
3580 constant. */
3581
3582 rtx
3583 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3584 enum machine_mode mode)
3585 {
3586 const char *c = (const char *) data;
3587 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3588
3589 memset (p, *c, GET_MODE_SIZE (mode));
3590
3591 return c_readstr (p, mode);
3592 }
3593
3594 /* Callback routine for store_by_pieces. Return the RTL of a register
3595 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3596 char value given in the RTL register data. For example, if mode is
3597 4 bytes wide, return the RTL for 0x01010101*data. */
3598
3599 static rtx
3600 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3601 enum machine_mode mode)
3602 {
3603 rtx target, coeff;
3604 size_t size;
3605 char *p;
3606
3607 size = GET_MODE_SIZE (mode);
3608 if (size == 1)
3609 return (rtx) data;
3610
3611 p = XALLOCAVEC (char, size);
3612 memset (p, 1, size);
3613 coeff = c_readstr (p, mode);
3614
3615 target = convert_to_mode (mode, (rtx) data, 1);
3616 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3617 return force_reg (mode, target);
3618 }
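/* For instance, in a 4-byte mode a run-time byte value of 0xAB is
   replicated by the multiplication 0xAB * 0x01010101 == 0xABABABAB,
   i.e. four copies of the byte. */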
3619
3620 /* Expand expression EXP, which is a call to the memset builtin. Return
3621 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3622 try to get the result in TARGET, if convenient (and in mode MODE if that's
3623 convenient). */
3624
3625 static rtx
3626 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3627 {
3628 if (!validate_arglist (exp,
3629 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3630 return NULL_RTX;
3631 else
3632 {
3633 tree dest = CALL_EXPR_ARG (exp, 0);
3634 tree val = CALL_EXPR_ARG (exp, 1);
3635 tree len = CALL_EXPR_ARG (exp, 2);
3636 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3637 }
3638 }
3639
3640 /* Helper function to do the actual work for expand_builtin_memset. The
3641 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3642 so that this can also be called without constructing an actual CALL_EXPR.
3643 The other arguments and return value are the same as for
3644 expand_builtin_memset. */
3645
3646 static rtx
3647 expand_builtin_memset_args (tree dest, tree val, tree len,
3648 rtx target, enum machine_mode mode, tree orig_exp)
3649 {
3650 tree fndecl, fn;
3651 enum built_in_function fcode;
3652 enum machine_mode val_mode;
3653 char c;
3654 unsigned int dest_align;
3655 rtx dest_mem, dest_addr, len_rtx;
3656 HOST_WIDE_INT expected_size = -1;
3657 unsigned int expected_align = 0;
3658 unsigned HOST_WIDE_INT min_size;
3659 unsigned HOST_WIDE_INT max_size;
3660 unsigned HOST_WIDE_INT probable_max_size;
3661
3662 dest_align = get_pointer_alignment (dest);
3663
3664 /* If DEST is not a pointer type, don't do this operation in-line. */
3665 if (dest_align == 0)
3666 return NULL_RTX;
3667
3668 if (currently_expanding_gimple_stmt)
3669 stringop_block_profile (currently_expanding_gimple_stmt,
3670 &expected_align, &expected_size);
3671
3672 if (expected_align < dest_align)
3673 expected_align = dest_align;
3674
3675 /* If the LEN parameter is zero, return DEST. */
3676 if (integer_zerop (len))
3677 {
3678 /* Evaluate and ignore VAL in case it has side-effects. */
3679 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3680 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3681 }
3682
3683 /* Stabilize the arguments in case we fail. */
3684 dest = builtin_save_expr (dest);
3685 val = builtin_save_expr (val);
3686 len = builtin_save_expr (len);
3687
3688 len_rtx = expand_normal (len);
3689 determine_block_size (len, len_rtx, &min_size, &max_size,
3690 &probable_max_size);
3691 dest_mem = get_memory_rtx (dest, len);
3692 val_mode = TYPE_MODE (unsigned_char_type_node);
3693
3694 if (TREE_CODE (val) != INTEGER_CST)
3695 {
3696 rtx val_rtx;
3697
3698 val_rtx = expand_normal (val);
3699 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3700
3701 /* Assume that we can memset by pieces if we can store
3702 the coefficients by pieces (in the required modes).
3703 We can't pass builtin_memset_gen_str as that emits RTL. */
3704 c = 1;
3705 if (tree_fits_uhwi_p (len)
3706 && can_store_by_pieces (tree_to_uhwi (len),
3707 builtin_memset_read_str, &c, dest_align,
3708 true))
3709 {
3710 val_rtx = force_reg (val_mode, val_rtx);
3711 store_by_pieces (dest_mem, tree_to_uhwi (len),
3712 builtin_memset_gen_str, val_rtx, dest_align,
3713 true, 0);
3714 }
3715 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3716 dest_align, expected_align,
3717 expected_size, min_size, max_size,
3718 probable_max_size))
3719 goto do_libcall;
3720
3721 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3722 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3723 return dest_mem;
3724 }
3725
3726 if (target_char_cast (val, &c))
3727 goto do_libcall;
3728
3729 if (c)
3730 {
3731 if (tree_fits_uhwi_p (len)
3732 && can_store_by_pieces (tree_to_uhwi (len),
3733 builtin_memset_read_str, &c, dest_align,
3734 true))
3735 store_by_pieces (dest_mem, tree_to_uhwi (len),
3736 builtin_memset_read_str, &c, dest_align, true, 0);
3737 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3738 gen_int_mode (c, val_mode),
3739 dest_align, expected_align,
3740 expected_size, min_size, max_size,
3741 probable_max_size))
3742 goto do_libcall;
3743
3744 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3745 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3746 return dest_mem;
3747 }
3748
3749 set_mem_align (dest_mem, dest_align);
3750 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3751 CALL_EXPR_TAILCALL (orig_exp)
3752 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3753 expected_align, expected_size,
3754 min_size, max_size,
3755 probable_max_size);
3756
3757 if (dest_addr == 0)
3758 {
3759 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3760 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3761 }
3762
3763 return dest_addr;
3764
3765 do_libcall:
3766 fndecl = get_callee_fndecl (orig_exp);
3767 fcode = DECL_FUNCTION_CODE (fndecl);
3768 if (fcode == BUILT_IN_MEMSET)
3769 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3770 dest, val, len);
3771 else if (fcode == BUILT_IN_BZERO)
3772 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3773 dest, len);
3774 else
3775 gcc_unreachable ();
3776 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3777 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3778 return expand_call (fn, target, target == const0_rtx);
3779 }
3780
3781 /* Expand expression EXP, which is a call to the bzero builtin. Return
3782 NULL_RTX if we failed; the caller should emit a normal call. */
3783
3784 static rtx
3785 expand_builtin_bzero (tree exp)
3786 {
3787 tree dest, size;
3788 location_t loc = EXPR_LOCATION (exp);
3789
3790 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3791 return NULL_RTX;
3792
3793 dest = CALL_EXPR_ARG (exp, 0);
3794 size = CALL_EXPR_ARG (exp, 1);
3795
3796 /* New argument list transforming bzero(ptr x, int y) to
3797 memset(ptr x, int 0, size_t y). This is done this way
3798 so that if it isn't expanded inline, we fall back to
3799 calling bzero instead of memset. */
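/* In other words, bzero (p, n) is expanded here exactly as
   memset (p, 0, (size_t) n) would be, except that the do_libcall
   fallback still emits a call to bzero. */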
3800
3801 return expand_builtin_memset_args (dest, integer_zero_node,
3802 fold_convert_loc (loc,
3803 size_type_node, size),
3804 const0_rtx, VOIDmode, exp);
3805 }
3806
3807 /* Expand expression EXP, which is a call to the memcmp built-in function.
3808 Return NULL_RTX if we failed and the caller should emit a normal call,
3809 otherwise try to get the result in TARGET, if convenient (and in mode
3810 MODE, if that's convenient). */
3811
3812 static rtx
3813 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3814 ATTRIBUTE_UNUSED enum machine_mode mode)
3815 {
3816 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3817
3818 if (!validate_arglist (exp,
3819 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3820 return NULL_RTX;
3821
3822 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3823 implementing memcmp because it will stop if it encounters two
3824 zero bytes. */
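/* For example, memcmp ("a\0x", "a\0y", 3) must examine all three
   bytes and report a difference, whereas a string-style comparison
   would stop at the embedded NULs and wrongly report equality. */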
3825 #if defined HAVE_cmpmemsi
3826 {
3827 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3828 rtx result;
3829 rtx insn;
3830 tree arg1 = CALL_EXPR_ARG (exp, 0);
3831 tree arg2 = CALL_EXPR_ARG (exp, 1);
3832 tree len = CALL_EXPR_ARG (exp, 2);
3833
3834 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3835 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3836 enum machine_mode insn_mode;
3837
3838 if (HAVE_cmpmemsi)
3839 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3840 else
3841 return NULL_RTX;
3842
3843 /* If we don't know the alignment of either argument, let a normal call be emitted. */
3844 if (arg1_align == 0 || arg2_align == 0)
3845 return NULL_RTX;
3846
3847 /* Make a place to write the result of the instruction. */
3848 result = target;
3849 if (! (result != 0
3850 && REG_P (result) && GET_MODE (result) == insn_mode
3851 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3852 result = gen_reg_rtx (insn_mode);
3853
3854 arg1_rtx = get_memory_rtx (arg1, len);
3855 arg2_rtx = get_memory_rtx (arg2, len);
3856 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3857
3858 /* Set MEM_SIZE as appropriate. */
3859 if (CONST_INT_P (arg3_rtx))
3860 {
3861 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3862 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3863 }
3864
3865 if (HAVE_cmpmemsi)
3866 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3867 GEN_INT (MIN (arg1_align, arg2_align)));
3868 else
3869 gcc_unreachable ();
3870
3871 if (insn)
3872 emit_insn (insn);
3873 else
3874 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3875 TYPE_MODE (integer_type_node), 3,
3876 XEXP (arg1_rtx, 0), Pmode,
3877 XEXP (arg2_rtx, 0), Pmode,
3878 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3879 TYPE_UNSIGNED (sizetype)),
3880 TYPE_MODE (sizetype));
3881
3882 /* Return the value in the proper mode for this function. */
3883 mode = TYPE_MODE (TREE_TYPE (exp));
3884 if (GET_MODE (result) == mode)
3885 return result;
3886 else if (target != 0)
3887 {
3888 convert_move (target, result, 0);
3889 return target;
3890 }
3891 else
3892 return convert_to_mode (mode, result, 0);
3893 }
3894 #endif /* HAVE_cmpmemsi. */
3895
3896 return NULL_RTX;
3897 }
3898
3899 /* Expand expression EXP, which is a call to the strcmp builtin.
3900 Return NULL_RTX if we failed; the caller should emit a normal call,
3901 otherwise try to get the result in TARGET, if convenient. */
3902
3903 static rtx
3904 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3905 {
3906 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3907 return NULL_RTX;
3908
3909 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3910 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3911 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3912 {
3913 rtx arg1_rtx, arg2_rtx;
3914 rtx result, insn = NULL_RTX;
3915 tree fndecl, fn;
3916 tree arg1 = CALL_EXPR_ARG (exp, 0);
3917 tree arg2 = CALL_EXPR_ARG (exp, 1);
3918
3919 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3920 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3921
3922 /* If we don't know the alignment of either argument, let a normal call be emitted. */
3923 if (arg1_align == 0 || arg2_align == 0)
3924 return NULL_RTX;
3925
3926 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3927 arg1 = builtin_save_expr (arg1);
3928 arg2 = builtin_save_expr (arg2);
3929
3930 arg1_rtx = get_memory_rtx (arg1, NULL);
3931 arg2_rtx = get_memory_rtx (arg2, NULL);
3932
3933 #ifdef HAVE_cmpstrsi
3934 /* Try to call cmpstrsi. */
3935 if (HAVE_cmpstrsi)
3936 {
3937 enum machine_mode insn_mode
3938 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3939
3940 /* Make a place to write the result of the instruction. */
3941 result = target;
3942 if (! (result != 0
3943 && REG_P (result) && GET_MODE (result) == insn_mode
3944 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3945 result = gen_reg_rtx (insn_mode);
3946
3947 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3948 GEN_INT (MIN (arg1_align, arg2_align)));
3949 }
3950 #endif
3951 #ifdef HAVE_cmpstrnsi
3952 /* Try to determine at least one length and call cmpstrnsi. */
3953 if (!insn && HAVE_cmpstrnsi)
3954 {
3955 tree len;
3956 rtx arg3_rtx;
3957
3958 enum machine_mode insn_mode
3959 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3960 tree len1 = c_strlen (arg1, 1);
3961 tree len2 = c_strlen (arg2, 1);
3962
3963 if (len1)
3964 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3965 if (len2)
3966 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3967
3968 /* If we don't have a constant length for the first, use the length
3969 of the second, if we know it. We don't require a constant for
3970 this case; some cost analysis could be done if both are available
3971 but neither is constant. For now, assume they're equally cheap,
3972 unless one has side effects. If both strings have constant lengths,
3973 use the smaller. */
3974
3975 if (!len1)
3976 len = len2;
3977 else if (!len2)
3978 len = len1;
3979 else if (TREE_SIDE_EFFECTS (len1))
3980 len = len2;
3981 else if (TREE_SIDE_EFFECTS (len2))
3982 len = len1;
3983 else if (TREE_CODE (len1) != INTEGER_CST)
3984 len = len2;
3985 else if (TREE_CODE (len2) != INTEGER_CST)
3986 len = len1;
3987 else if (tree_int_cst_lt (len1, len2))
3988 len = len1;
3989 else
3990 len = len2;
3991
3992 /* If both arguments have side effects, we cannot optimize. */
3993 if (!len || TREE_SIDE_EFFECTS (len))
3994 goto do_libcall;
3995
3996 arg3_rtx = expand_normal (len);
3997
3998 /* Make a place to write the result of the instruction. */
3999 result = target;
4000 if (! (result != 0
4001 && REG_P (result) && GET_MODE (result) == insn_mode
4002 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4003 result = gen_reg_rtx (insn_mode);
4004
4005 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4006 GEN_INT (MIN (arg1_align, arg2_align)));
4007 }
4008 #endif
4009
4010 if (insn)
4011 {
4012 enum machine_mode mode;
4013 emit_insn (insn);
4014
4015 /* Return the value in the proper mode for this function. */
4016 mode = TYPE_MODE (TREE_TYPE (exp));
4017 if (GET_MODE (result) == mode)
4018 return result;
4019 if (target == 0)
4020 return convert_to_mode (mode, result, 0);
4021 convert_move (target, result, 0);
4022 return target;
4023 }
4024
4025 /* Expand the library call ourselves using a stabilized argument
4026 list to avoid evaluating the function's arguments twice. */
4027 #ifdef HAVE_cmpstrnsi
4028 do_libcall:
4029 #endif
4030 fndecl = get_callee_fndecl (exp);
4031 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4032 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4033 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4034 return expand_call (fn, target, target == const0_rtx);
4035 }
4036 #endif
4037 return NULL_RTX;
4038 }
4039
4040 /* Expand expression EXP, which is a call to the strncmp builtin.
4041 Return NULL_RTX if we failed; the caller should emit a normal call,
4042 otherwise try to get the result in TARGET, if convenient. */
4043
4044 static rtx
4045 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4046 ATTRIBUTE_UNUSED enum machine_mode mode)
4047 {
4048 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4049
4050 if (!validate_arglist (exp,
4051 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4052 return NULL_RTX;
4053
4054 /* If c_strlen can determine an expression for one of the string
4055 lengths, and it doesn't have side effects, then emit cmpstrnsi
4056 using length MIN(strlen(string)+1, arg3). */
4057 #ifdef HAVE_cmpstrnsi
4058 if (HAVE_cmpstrnsi)
4059 {
4060 tree len, len1, len2;
4061 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4062 rtx result, insn;
4063 tree fndecl, fn;
4064 tree arg1 = CALL_EXPR_ARG (exp, 0);
4065 tree arg2 = CALL_EXPR_ARG (exp, 1);
4066 tree arg3 = CALL_EXPR_ARG (exp, 2);
4067
4068 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4069 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4070 enum machine_mode insn_mode
4071 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4072
4073 len1 = c_strlen (arg1, 1);
4074 len2 = c_strlen (arg2, 1);
4075
4076 if (len1)
4077 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4078 if (len2)
4079 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4080
4081 /* If we don't have a constant length for the first, use the length
4082 of the second, if we know it. We don't require a constant for
4083 this case; some cost analysis could be done if both are available
4084 but neither is constant. For now, assume they're equally cheap,
4085 unless one has side effects. If both strings have constant lengths,
4086 use the smaller. */
4087
4088 if (!len1)
4089 len = len2;
4090 else if (!len2)
4091 len = len1;
4092 else if (TREE_SIDE_EFFECTS (len1))
4093 len = len2;
4094 else if (TREE_SIDE_EFFECTS (len2))
4095 len = len1;
4096 else if (TREE_CODE (len1) != INTEGER_CST)
4097 len = len2;
4098 else if (TREE_CODE (len2) != INTEGER_CST)
4099 len = len1;
4100 else if (tree_int_cst_lt (len1, len2))
4101 len = len1;
4102 else
4103 len = len2;
4104
4105 /* If both arguments have side effects, we cannot optimize. */
4106 if (!len || TREE_SIDE_EFFECTS (len))
4107 return NULL_RTX;
4108
4109 /* The actual new length parameter is MIN(len,arg3). */
4110 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4111 fold_convert_loc (loc, TREE_TYPE (len), arg3));
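/* For example, in strncmp (s1, "ab", 100) the second string gives
   len == 3 (strlen plus one), so the comparison is emitted with
   length MIN (3, 100) == 3; bytes past the NUL cannot change the
   result. */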
4112
4113 /* If we don't know the alignment of either argument, let a normal call be emitted. */
4114 if (arg1_align == 0 || arg2_align == 0)
4115 return NULL_RTX;
4116
4117 /* Make a place to write the result of the instruction. */
4118 result = target;
4119 if (! (result != 0
4120 && REG_P (result) && GET_MODE (result) == insn_mode
4121 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4122 result = gen_reg_rtx (insn_mode);
4123
4124 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4125 arg1 = builtin_save_expr (arg1);
4126 arg2 = builtin_save_expr (arg2);
4127 len = builtin_save_expr (len);
4128
4129 arg1_rtx = get_memory_rtx (arg1, len);
4130 arg2_rtx = get_memory_rtx (arg2, len);
4131 arg3_rtx = expand_normal (len);
4132 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4134 if (insn)
4135 {
4136 emit_insn (insn);
4137
4138 /* Return the value in the proper mode for this function. */
4139 mode = TYPE_MODE (TREE_TYPE (exp));
4140 if (GET_MODE (result) == mode)
4141 return result;
4142 if (target == 0)
4143 return convert_to_mode (mode, result, 0);
4144 convert_move (target, result, 0);
4145 return target;
4146 }
4147
4148 /* Expand the library call ourselves using a stabilized argument
4149 list to avoid evaluating the function's arguments twice. */
4150 fndecl = get_callee_fndecl (exp);
4151 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4152 arg1, arg2, len);
4153 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4154 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4155 return expand_call (fn, target, target == const0_rtx);
4156 }
4157 #endif
4158 return NULL_RTX;
4159 }
4160
4161 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4162 if that's convenient. */
4163
4164 rtx
4165 expand_builtin_saveregs (void)
4166 {
4167 rtx val;
4168 rtx_insn *seq;
4169
4170 /* Don't do __builtin_saveregs more than once in a function.
4171 Save the result of the first call and reuse it. */
4172 if (saveregs_value != 0)
4173 return saveregs_value;
4174
4175 /* When this function is called, it means that registers must be
4176 saved on entry to this function. So we migrate the call to the
4177 first insn of this function. */
4178
4179 start_sequence ();
4180
4181 /* Do whatever the machine needs done in this case. */
4182 val = targetm.calls.expand_builtin_saveregs ();
4183
4184 seq = get_insns ();
4185 end_sequence ();
4186
4187 saveregs_value = val;
4188
4189 /* Put the insns after the NOTE that starts the function. If this
4190 is inside a start_sequence, make the outer-level insn chain current, so
4191 the code is placed at the start of the function. */
4192 push_topmost_sequence ();
4193 emit_insn_after (seq, entry_of_function ());
4194 pop_topmost_sequence ();
4195
4196 return val;
4197 }
4198
4199 /* Expand a call to __builtin_next_arg. */
4200
4201 static rtx
4202 expand_builtin_next_arg (void)
4203 {
4204 /* Checking arguments is already done in fold_builtin_next_arg
4205 that must be called before this function. */
4206 return expand_binop (ptr_mode, add_optab,
4207 crtl->args.internal_arg_pointer,
4208 crtl->args.arg_offset_rtx,
4209 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4210 }
4211
4212 /* Make it easier for the backends by protecting the valist argument
4213 from multiple evaluations. */
4214
4215 static tree
4216 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4217 {
4218 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4219
4220 /* The current way of determining the type of valist is completely
4221 bogus. We should have the information on the va builtin instead. */
4222 if (!vatype)
4223 vatype = targetm.fn_abi_va_list (cfun->decl);
4224
4225 if (TREE_CODE (vatype) == ARRAY_TYPE)
4226 {
4227 if (TREE_SIDE_EFFECTS (valist))
4228 valist = save_expr (valist);
4229
4230 /* For this case, the backends will be expecting a pointer to
4231 vatype, but it's possible we've actually been given an array
4232 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4233 So fix it. */
4234 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4235 {
4236 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4237 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4238 }
4239 }
4240 else
4241 {
4242 tree pt = build_pointer_type (vatype);
4243
4244 if (! needs_lvalue)
4245 {
4246 if (! TREE_SIDE_EFFECTS (valist))
4247 return valist;
4248
4249 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4250 TREE_SIDE_EFFECTS (valist) = 1;
4251 }
4252
4253 if (TREE_SIDE_EFFECTS (valist))
4254 valist = save_expr (valist);
4255 valist = fold_build2_loc (loc, MEM_REF,
4256 vatype, valist, build_int_cst (pt, 0));
4257 }
4258
4259 return valist;
4260 }
4261
4262 /* The "standard" definition of va_list is void*. */
4263
4264 tree
4265 std_build_builtin_va_list (void)
4266 {
4267 return ptr_type_node;
4268 }
4269
4270 /* The "standard" abi va_list is va_list_type_node. */
4271
4272 tree
4273 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4274 {
4275 return va_list_type_node;
4276 }
4277
4278 /* The "standard" type of va_list is va_list_type_node. */
4279
4280 tree
4281 std_canonical_va_list_type (tree type)
4282 {
4283 tree wtype, htype;
4284
4285 if (INDIRECT_REF_P (type))
4286 type = TREE_TYPE (type);
4287 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4288 type = TREE_TYPE (type);
4289 wtype = va_list_type_node;
4290 htype = type;
4291 /* Treat structure va_list types. */
4292 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4293 htype = TREE_TYPE (htype);
4294 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4295 {
4296 /* If va_list is an array type, the argument may have decayed
4297 to a pointer type, e.g. by being passed to another function.
4298 In that case, unwrap both types so that we can compare the
4299 underlying records. */
4300 if (TREE_CODE (htype) == ARRAY_TYPE
4301 || POINTER_TYPE_P (htype))
4302 {
4303 wtype = TREE_TYPE (wtype);
4304 htype = TREE_TYPE (htype);
4305 }
4306 }
4307 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4308 return va_list_type_node;
4309
4310 return NULL_TREE;
4311 }
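/* As a concrete case: on targets whose va_list is an array of one
   record (x86-64, for instance, uses __va_list_tag[1]), a va_list
   function parameter arrives decayed to __va_list_tag *, and the
   unwrapping above lets the two spellings compare equal. */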
4312
4313 /* The "standard" implementation of va_start: just assign `nextarg' to
4314 the variable. */
4315
4316 void
4317 std_expand_builtin_va_start (tree valist, rtx nextarg)
4318 {
4319 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4320 convert_move (va_r, nextarg, 0);
4321 }
4322
4323 /* Expand EXP, a call to __builtin_va_start. */
4324
4325 static rtx
4326 expand_builtin_va_start (tree exp)
4327 {
4328 rtx nextarg;
4329 tree valist;
4330 location_t loc = EXPR_LOCATION (exp);
4331
4332 if (call_expr_nargs (exp) < 2)
4333 {
4334 error_at (loc, "too few arguments to function %<va_start%>");
4335 return const0_rtx;
4336 }
4337
4338 if (fold_builtin_next_arg (exp, true))
4339 return const0_rtx;
4340
4341 nextarg = expand_builtin_next_arg ();
4342 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4343
4344 if (targetm.expand_builtin_va_start)
4345 targetm.expand_builtin_va_start (valist, nextarg);
4346 else
4347 std_expand_builtin_va_start (valist, nextarg);
4348
4349 return const0_rtx;
4350 }
4351
4352 /* Expand EXP, a call to __builtin_va_end. */
4353
4354 static rtx
4355 expand_builtin_va_end (tree exp)
4356 {
4357 tree valist = CALL_EXPR_ARG (exp, 0);
4358
4359 /* Evaluate for side effects, if needed. I hate macros that don't
4360 do that. */
4361 if (TREE_SIDE_EFFECTS (valist))
4362 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4363
4364 return const0_rtx;
4365 }
4366
4367 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4368 builtin rather than just as an assignment in stdarg.h because of the
4369 nastiness of array-type va_list types. */
4370
4371 static rtx
4372 expand_builtin_va_copy (tree exp)
4373 {
4374 tree dst, src, t;
4375 location_t loc = EXPR_LOCATION (exp);
4376
4377 dst = CALL_EXPR_ARG (exp, 0);
4378 src = CALL_EXPR_ARG (exp, 1);
4379
4380 dst = stabilize_va_list_loc (loc, dst, 1);
4381 src = stabilize_va_list_loc (loc, src, 0);
4382
4383 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4384
4385 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4386 {
4387 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4388 TREE_SIDE_EFFECTS (t) = 1;
4389 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4390 }
4391 else
4392 {
4393 rtx dstb, srcb, size;
4394
4395 /* Evaluate to pointers. */
4396 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4397 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4398 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4399 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4400
4401 dstb = convert_memory_address (Pmode, dstb);
4402 srcb = convert_memory_address (Pmode, srcb);
4403
4404 /* "Dereference" to BLKmode memories. */
4405 dstb = gen_rtx_MEM (BLKmode, dstb);
4406 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4407 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4408 srcb = gen_rtx_MEM (BLKmode, srcb);
4409 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4410 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4411
4412 /* Copy. */
4413 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4414 }
4415
4416 return const0_rtx;
4417 }
4418
4419 /* Expand a call to one of the builtin functions __builtin_frame_address or
4420 __builtin_return_address. */
4421
4422 static rtx
4423 expand_builtin_frame_address (tree fndecl, tree exp)
4424 {
4425 /* The argument must be a nonnegative integer constant.
4426 It counts the number of frames to scan up the stack.
4427 The value is the frame address or return address saved in that frame. */
4428 if (call_expr_nargs (exp) == 0)
4429 /* Warning about missing arg was already issued. */
4430 return const0_rtx;
4431 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4432 {
4433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4434 error ("invalid argument to %<__builtin_frame_address%>");
4435 else
4436 error ("invalid argument to %<__builtin_return_address%>");
4437 return const0_rtx;
4438 }
4439 else
4440 {
4441 rtx tem
4442 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4443 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4444
4445 /* Some ports cannot access arbitrary stack frames. */
4446 if (tem == NULL)
4447 {
4448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4449 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4450 else
4451 warning (0, "unsupported argument to %<__builtin_return_address%>");
4452 return const0_rtx;
4453 }
4454
4455 /* For __builtin_frame_address, return what we've got. */
4456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4457 return tem;
4458
4459 if (!REG_P (tem)
4460 && ! CONSTANT_P (tem))
4461 tem = copy_addr_to_reg (tem);
4462 return tem;
4463 }
4464 }
4465
4466 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4467 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4468 is the same as for allocate_dynamic_stack_space. */
4469
4470 static rtx
4471 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4472 {
4473 rtx op0;
4474 rtx result;
4475 bool valid_arglist;
4476 unsigned int align;
4477 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4478 == BUILT_IN_ALLOCA_WITH_ALIGN);
4479
4480 valid_arglist
4481 = (alloca_with_align
4482 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4483 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4484
4485 if (!valid_arglist)
4486 return NULL_RTX;
4487
4488 /* Compute the argument. */
4489 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4490
4491 /* Compute the alignment. */
4492 align = (alloca_with_align
4493 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4494 : BIGGEST_ALIGNMENT);
4495
4496 /* Allocate the desired space. */
4497 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4498 result = convert_memory_address (ptr_mode, result);
4499
4500 return result;
4501 }
4502
4503 /* Expand a call to bswap builtin in EXP.
4504 Return NULL_RTX if a normal call should be emitted rather than expanding the
4505 function in-line. If convenient, the result should be placed in TARGET.
4506 SUBTARGET may be used as the target for computing one of EXP's operands. */
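/* For example, in SImode a bswap of 0x12345678 yields 0x78563412,
   using the machine's byte-swap instruction when one is available. */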
4507
4508 static rtx
4509 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4510 rtx subtarget)
4511 {
4512 tree arg;
4513 rtx op0;
4514
4515 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4516 return NULL_RTX;
4517
4518 arg = CALL_EXPR_ARG (exp, 0);
4519 op0 = expand_expr (arg,
4520 subtarget && GET_MODE (subtarget) == target_mode
4521 ? subtarget : NULL_RTX,
4522 target_mode, EXPAND_NORMAL);
4523 if (GET_MODE (op0) != target_mode)
4524 op0 = convert_to_mode (target_mode, op0, 1);
4525
4526 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4527
4528 gcc_assert (target);
4529
4530 return convert_to_mode (target_mode, target, 1);
4531 }
4532
4533 /* Expand a call to a unary builtin in EXP.
4534 Return NULL_RTX if a normal call should be emitted rather than expanding the
4535 function in-line. If convenient, the result should be placed in TARGET.
4536 SUBTARGET may be used as the target for computing one of EXP's operands. */
4537
4538 static rtx
4539 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4540 rtx subtarget, optab op_optab)
4541 {
4542 rtx op0;
4543
4544 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4545 return NULL_RTX;
4546
4547 /* Compute the argument. */
4548 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4549 (subtarget
4550 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4551 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4552 VOIDmode, EXPAND_NORMAL);
4553 /* Compute op, into TARGET if possible.
4554 Set TARGET to wherever the result comes back. */
4555 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4556 op_optab, op0, target, op_optab != clrsb_optab);
4557 gcc_assert (target);
4558
4559 return convert_to_mode (target_mode, target, 0);
4560 }
4561
4562 /* Expand a call to __builtin_expect. We just return our argument
4563 as the __builtin_expect semantics should already have been handled
4564 by the tree branch prediction pass. */
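/* A typical source-level use (handle_error is just a stand-in) is
   if (__builtin_expect (err != 0, 0))
     handle_error ();
   which marks the branch as unlikely; by this point that hint has
   already been consumed by the branch prediction pass. */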
4565
4566 static rtx
4567 expand_builtin_expect (tree exp, rtx target)
4568 {
4569 tree arg;
4570
4571 if (call_expr_nargs (exp) < 2)
4572 return const0_rtx;
4573 arg = CALL_EXPR_ARG (exp, 0);
4574
4575 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4576 /* When guessing was done, the hints should already have been stripped away. */
4577 gcc_assert (!flag_guess_branch_prob
4578 || optimize == 0 || seen_error ());
4579 return target;
4580 }
4581
4582 /* Expand a call to __builtin_assume_aligned. We just return our first
4583 argument, as the __builtin_assume_aligned semantics should already
4584 have been handled by CCP. */
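/* A typical source-level use is
   double *q = __builtin_assume_aligned (p, 16);
   which asserts that P is 16-byte aligned; CCP extracts the
   alignment, and expansion simply returns the first argument. */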
4585
4586 static rtx
4587 expand_builtin_assume_aligned (tree exp, rtx target)
4588 {
4589 if (call_expr_nargs (exp) < 2)
4590 return const0_rtx;
4591 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4592 EXPAND_NORMAL);
4593 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4594 && (call_expr_nargs (exp) < 3
4595 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4596 return target;
4597 }
4598
4599 void
4600 expand_builtin_trap (void)
4601 {
4602 #ifdef HAVE_trap
4603 if (HAVE_trap)
4604 {
4605 rtx insn = emit_insn (gen_trap ());
4606 /* For trap insns when not accumulating outgoing args force
4607 REG_ARGS_SIZE note to prevent crossjumping of calls with
4608 different args sizes. */
4609 if (!ACCUMULATE_OUTGOING_ARGS)
4610 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4611 }
4612 else
4613 #endif
4614 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4615 emit_barrier ();
4616 }
4617
4618 /* Expand a call to __builtin_unreachable. We do nothing except emit
4619 a barrier saying that control flow will not pass here.
4620
4621 It is the responsibility of the program being compiled to ensure
4622 that control flow never reaches __builtin_unreachable. */
4623 static void
4624 expand_builtin_unreachable (void)
4625 {
4626 emit_barrier ();
4627 }
4628
4629 /* Expand EXP, a call to fabs, fabsf or fabsl.
4630 Return NULL_RTX if a normal call should be emitted rather than expanding
4631 the function inline. If convenient, the result should be placed
4632 in TARGET. SUBTARGET may be used as the target for computing
4633 the operand. */
4634
4635 static rtx
4636 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4637 {
4638 enum machine_mode mode;
4639 tree arg;
4640 rtx op0;
4641
4642 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4643 return NULL_RTX;
4644
4645 arg = CALL_EXPR_ARG (exp, 0);
4646 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4647 mode = TYPE_MODE (TREE_TYPE (arg));
4648 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4649 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4650 }
4651
4652 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4653 Return NULL_RTX if a normal call should be emitted rather than expanding the
4654 function inline. If convenient, the result should be placed in TARGET.
4655 SUBTARGET may be used as the target for computing the operand. */
4656
4657 static rtx
4658 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4659 {
4660 rtx op0, op1;
4661 tree arg;
4662
4663 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4664 return NULL_RTX;
4665
4666 arg = CALL_EXPR_ARG (exp, 0);
4667 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4668
4669 arg = CALL_EXPR_ARG (exp, 1);
4670 op1 = expand_normal (arg);
4671
4672 return expand_copysign (op0, op1, target);
4673 }
4674
4675 /* Expand a call to __builtin___clear_cache. */
4676
4677 static rtx
4678 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4679 {
4680 #ifndef HAVE_clear_cache
4681 #ifdef CLEAR_INSN_CACHE
4682 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4683 does something. Just do the default expansion to a call to
4684 __clear_cache(). */
4685 return NULL_RTX;
4686 #else
4687 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4688 does nothing. There is no need to call it. Do nothing. */
4689 return const0_rtx;
4690 #endif /* CLEAR_INSN_CACHE */
4691 #else
4692 /* We have a "clear_cache" insn, and it will handle everything. */
4693 tree begin, end;
4694 rtx begin_rtx, end_rtx;
4695
4696 /* We must not expand to a library call. If we did, any
4697 fallback library function in libgcc that might contain a call to
4698 __builtin___clear_cache() would recurse infinitely. */
4699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4700 {
4701 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4702 return const0_rtx;
4703 }
4704
4705 if (HAVE_clear_cache)
4706 {
4707 struct expand_operand ops[2];
4708
4709 begin = CALL_EXPR_ARG (exp, 0);
4710 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4711
4712 end = CALL_EXPR_ARG (exp, 1);
4713 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4714
4715 create_address_operand (&ops[0], begin_rtx);
4716 create_address_operand (&ops[1], end_rtx);
4717 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4718 return const0_rtx;
4719 }
4720 return const0_rtx;
4721 #endif /* HAVE_clear_cache */
4722 }
4723
4724 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4725
4726 static rtx
4727 round_trampoline_addr (rtx tramp)
4728 {
4729 rtx temp, addend, mask;
4730
4731 /* If we don't need too much alignment, we'll have been guaranteed
4732 proper alignment by get_trampoline_type. */
4733 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4734 return tramp;
4735
4736 /* Round address up to desired boundary. */
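/* With A = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT bytes, the two binops
   below compute (tramp + (A - 1)) & -A, the usual idiom for rounding
   up to a power-of-two boundary. */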
4737 temp = gen_reg_rtx (Pmode);
4738 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4739 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4740
4741 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4742 temp, 0, OPTAB_LIB_WIDEN);
4743 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4744 temp, 0, OPTAB_LIB_WIDEN);
4745
4746 return tramp;
4747 }
4748
4749 static rtx
4750 expand_builtin_init_trampoline (tree exp, bool onstack)
4751 {
4752 tree t_tramp, t_func, t_chain;
4753 rtx m_tramp, r_tramp, r_chain, tmp;
4754
4755 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4756 POINTER_TYPE, VOID_TYPE))
4757 return NULL_RTX;
4758
4759 t_tramp = CALL_EXPR_ARG (exp, 0);
4760 t_func = CALL_EXPR_ARG (exp, 1);
4761 t_chain = CALL_EXPR_ARG (exp, 2);
4762
4763 r_tramp = expand_normal (t_tramp);
4764 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4765 MEM_NOTRAP_P (m_tramp) = 1;
4766
4767 /* If ONSTACK, the TRAMP argument should be the address of a field
4768 within the local function's FRAME decl. Either way, let's see if
4769 we can fill in the MEM_ATTRs for this memory. */
4770 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4771 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4772
4773 /* Creator of a heap trampoline is responsible for making sure the
4774 address is aligned to at least STACK_BOUNDARY. Normally malloc
4775 will ensure this anyhow. */
4776 tmp = round_trampoline_addr (r_tramp);
4777 if (tmp != r_tramp)
4778 {
4779 m_tramp = change_address (m_tramp, BLKmode, tmp);
4780 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4781 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4782 }
4783
4784 /* The FUNC argument should be the address of the nested function.
4785 Extract the actual function decl to pass to the hook. */
4786 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4787 t_func = TREE_OPERAND (t_func, 0);
4788 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4789
4790 r_chain = expand_normal (t_chain);
4791
4792 /* Generate insns to initialize the trampoline. */
4793 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4794
4795 if (onstack)
4796 {
4797 trampolines_created = 1;
4798
4799 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4800 "trampoline generated for nested function %qD", t_func);
4801 }
4802
4803 return const0_rtx;
4804 }
4805
4806 static rtx
4807 expand_builtin_adjust_trampoline (tree exp)
4808 {
4809 rtx tramp;
4810
4811 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4812 return NULL_RTX;
4813
4814 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4815 tramp = round_trampoline_addr (tramp);
4816 if (targetm.calls.trampoline_adjust_address)
4817 tramp = targetm.calls.trampoline_adjust_address (tramp);
4818
4819 return tramp;
4820 }
4821
4822 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4823 function. The function first checks whether the back end provides
4824 an insn to implement signbit for the respective mode. If not, it
4825 checks whether the floating point format of the value is such that
4826 the sign bit can be extracted. If that is not the case, the
4827 function returns NULL_RTX to indicate that a normal call should be
4828 emitted rather than expanding the function in-line. EXP is the
4829 expression that is a call to the builtin function; if convenient,
4830 the result should be placed in TARGET. */
4831 static rtx
4832 expand_builtin_signbit (tree exp, rtx target)
4833 {
4834 const struct real_format *fmt;
4835 enum machine_mode fmode, imode, rmode;
4836 tree arg;
4837 int word, bitpos;
4838 enum insn_code icode;
4839 rtx temp;
4840 location_t loc = EXPR_LOCATION (exp);
4841
4842 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4843 return NULL_RTX;
4844
4845 arg = CALL_EXPR_ARG (exp, 0);
4846 fmode = TYPE_MODE (TREE_TYPE (arg));
4847 rmode = TYPE_MODE (TREE_TYPE (exp));
4848 fmt = REAL_MODE_FORMAT (fmode);
4849
4850 arg = builtin_save_expr (arg);
4851
4852 /* Expand the argument yielding a RTX expression. */
4853 temp = expand_normal (arg);
4854
4855 /* Check if the back end provides an insn that handles signbit for the
4856 argument's mode. */
4857 icode = optab_handler (signbit_optab, fmode);
4858 if (icode != CODE_FOR_nothing)
4859 {
4860 rtx_insn *last = get_last_insn ();
4861 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4862 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4863 return target;
4864 delete_insns_since (last);
4865 }
4866
4867 /* For floating point formats without a sign bit, implement signbit
4868 as "ARG < 0.0". */
4869 bitpos = fmt->signbit_ro;
4870 if (bitpos < 0)
4871 {
4872 /* But we can't do this if the format supports signed zero. */
4873 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4874 return NULL_RTX;
4875
4876 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4877 build_real (TREE_TYPE (arg), dconst0));
4878 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4879 }
4880
4881 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4882 {
4883 imode = int_mode_for_mode (fmode);
4884 if (imode == BLKmode)
4885 return NULL_RTX;
4886 temp = gen_lowpart (imode, temp);
4887 }
4888 else
4889 {
4890 imode = word_mode;
4891 /* Handle targets with different FP word orders. */
4892 if (FLOAT_WORDS_BIG_ENDIAN)
4893 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4894 else
4895 word = bitpos / BITS_PER_WORD;
4896 temp = operand_subword_force (temp, word, fmode);
4897 bitpos = bitpos % BITS_PER_WORD;
4898 }
4899
4900 /* Force the intermediate word_mode (or narrower) result into a
4901 register. This avoids attempting to create paradoxical SUBREGs
4902 of floating point modes below. */
4903 temp = force_reg (imode, temp);
4904
4905 /* If the bitpos is within the "result mode" lowpart, the operation
4906 can be implemented with a single bitwise AND. Otherwise, we need
4907 a right shift and an AND. */
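/* For example, for IEEE single precision (bitpos == 31) and a 32-bit
   result mode, a single AND against 0x80000000 suffices; the shift
   path is needed only when the sign bit lies above the result mode's
   precision. */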
4908
4909 if (bitpos < GET_MODE_BITSIZE (rmode))
4910 {
4911 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4912
4913 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4914 temp = gen_lowpart (rmode, temp);
4915 temp = expand_binop (rmode, and_optab, temp,
4916 immed_wide_int_const (mask, rmode),
4917 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4918 }
4919 else
4920 {
4921 /* Perform a logical right shift to place the signbit in the least
4922 significant bit, then truncate the result to the desired mode
4923 and mask just this bit. */
4924 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4925 temp = gen_lowpart (rmode, temp);
4926 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4927 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4928 }
4929
4930 return temp;
4931 }
4932
4933 /* Expand fork or exec calls. TARGET is the desired target of the
4934 call. EXP is the call. FN is the
4935 identifier of the actual function. IGNORE is nonzero if the
4936 value is to be ignored. */
4937
4938 static rtx
4939 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4940 {
4941 tree id, decl;
4942 tree call;
4943
4944 /* If we are not profiling, just call the function. */
4945 if (!profile_arc_flag)
4946 return NULL_RTX;
4947
4948 /* Otherwise call the wrapper. This should be equivalent for the rest of
4949 the compiler, so the code does not diverge, and the wrapper may run the
4950 code necessary for keeping the profiling sane. */
4951
4952 switch (DECL_FUNCTION_CODE (fn))
4953 {
4954 case BUILT_IN_FORK:
4955 id = get_identifier ("__gcov_fork");
4956 break;
4957
4958 case BUILT_IN_EXECL:
4959 id = get_identifier ("__gcov_execl");
4960 break;
4961
4962 case BUILT_IN_EXECV:
4963 id = get_identifier ("__gcov_execv");
4964 break;
4965
4966 case BUILT_IN_EXECLP:
4967 id = get_identifier ("__gcov_execlp");
4968 break;
4969
4970 case BUILT_IN_EXECLE:
4971 id = get_identifier ("__gcov_execle");
4972 break;
4973
4974 case BUILT_IN_EXECVP:
4975 id = get_identifier ("__gcov_execvp");
4976 break;
4977
4978 case BUILT_IN_EXECVE:
4979 id = get_identifier ("__gcov_execve");
4980 break;
4981
4982 default:
4983 gcc_unreachable ();
4984 }
4985
4986 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4987 FUNCTION_DECL, id, TREE_TYPE (fn));
4988 DECL_EXTERNAL (decl) = 1;
4989 TREE_PUBLIC (decl) = 1;
4990 DECL_ARTIFICIAL (decl) = 1;
4991 TREE_NOTHROW (decl) = 1;
4992 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4993 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4994 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4995 return expand_call (call, target, ignore);
4996 }
4997
4998
4999 \f
5000 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5001 the pointer in these functions is void*, the tree optimizers may remove
5002 casts. The mode computed in expand_builtin isn't reliable either, due
5003 to __sync_bool_compare_and_swap.
5004
5005 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5006 group of builtins. This gives us log2 of the mode size. */
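/* For example, the _4 variant of a group has FCODE_DIFF == 2, so we
   ask for the integer mode of BITS_PER_UNIT << 2 == 32 bits (SImode
   on typical targets). */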
5007
5008 static inline enum machine_mode
5009 get_builtin_sync_mode (int fcode_diff)
5010 {
5011 /* The size is not negotiable, so ask not to get BLKmode in return
5012 if the target indicates that a smaller size would be better. */
5013 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5014 }
5015
5016 /* Expand the memory expression LOC and return the appropriate memory operand
5017 for the builtin_sync operations. */
5018
5019 static rtx
5020 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5021 {
5022 rtx addr, mem;
5023
5024 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5025 addr = convert_memory_address (Pmode, addr);
5026
5027 /* Note that we explicitly do not want any alias information for this
5028 memory, so that we kill all other live memories. Otherwise we don't
5029 satisfy the full barrier semantics of the intrinsic. */
5030 mem = validize_mem (gen_rtx_MEM (mode, addr));
5031
5032 /* The alignment needs to be at least that of the mode. */
5033 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5034 get_pointer_alignment (loc)));
5035 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5036 MEM_VOLATILE_P (mem) = 1;
5037
5038 return mem;
5039 }
5040
5041 /* Make sure an argument is in the right mode.
5042 EXP is the tree argument.
5043 MODE is the mode it should be in. */
5044
5045 static rtx
5046 expand_expr_force_mode (tree exp, enum machine_mode mode)
5047 {
5048 rtx val;
5049 enum machine_mode old_mode;
5050
5051 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5052 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5053 of CONST_INTs, where we know the old_mode only from the call argument. */
5054
5055 old_mode = GET_MODE (val);
5056 if (old_mode == VOIDmode)
5057 old_mode = TYPE_MODE (TREE_TYPE (exp));
5058 val = convert_modes (mode, old_mode, val, 1);
5059 return val;
5060 }
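
/* For example, expanding the integer constant 1 yields (const_int 1) with
   VOIDmode, so OLD_MODE must be recovered from TREE_TYPE (exp) before the
   call to convert_modes.  */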
5061
5062
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code that corresponds to the
   arithmetic or logical operation from the name; an exception here is
   that NOT actually means NAND.  TARGET is an optional place for us to
   store the results; AFTER is true for the xxx_and_fetch form, which
   returns the value after the operation (the callers below pass false
   for the fetch_and_xxx form).  */
5069
5070 static rtx
5071 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5072 enum rtx_code code, bool after,
5073 rtx target)
5074 {
5075 rtx val, mem;
5076 location_t loc = EXPR_LOCATION (exp);
5077
5078 if (code == NOT && warn_sync_nand)
5079 {
5080 tree fndecl = get_callee_fndecl (exp);
5081 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5082
5083 static bool warned_f_a_n, warned_n_a_f;
5084
5085 switch (fcode)
5086 {
5087 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5088 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5089 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5090 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5091 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5092 if (warned_f_a_n)
5093 break;
5094
5095 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5096 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5097 warned_f_a_n = true;
5098 break;
5099
5100 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5101 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5102 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5103 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5104 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5105 if (warned_n_a_f)
5106 break;
5107
5108 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5109 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5110 warned_n_a_f = true;
5111 break;
5112
5113 default:
5114 gcc_unreachable ();
5115 }
5116 }
5117
5118 /* Expand the operands. */
5119 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5120 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5121
5122 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5123 after);
5124 }
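
/* For example, at the source level (values are illustrative):

     int x = 10;
     int a = __sync_fetch_and_add (&x, 5);    a == 10, x == 15  (AFTER false)
     int b = __sync_add_and_fetch (&x, 5);    b == 20, x == 20  (AFTER true)

   Both forms expand with MEMMODEL_SEQ_CST and differ only in the AFTER
   flag passed to expand_atomic_fetch_op.  */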
5125
5126 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5127 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5128 true if this is the boolean form. TARGET is a place for us to store the
5129 results; this is NOT optional if IS_BOOL is true. */
5130
5131 static rtx
5132 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5133 bool is_bool, rtx target)
5134 {
5135 rtx old_val, new_val, mem;
5136 rtx *pbool, *poval;
5137
5138 /* Expand the operands. */
5139 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5140 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5141 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5142
5143 pbool = poval = NULL;
5144 if (target != const0_rtx)
5145 {
5146 if (is_bool)
5147 pbool = &target;
5148 else
5149 poval = &target;
5150 }
5151 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5152 false, MEMMODEL_SEQ_CST,
5153 MEMMODEL_SEQ_CST))
5154 return NULL_RTX;
5155
5156 return target;
5157 }
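
/* For example, at the source level (values are illustrative):

     int v = 3;
     int ok  = __sync_bool_compare_and_swap (&v, 3, 7);    ok == 1, v == 7
     int old = __sync_val_compare_and_swap (&v, 3, 9);     old == 7, v == 7

   The bool form receives the comparison result through PBOOL, the val
   form the prior memory contents through POVAL.  */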
5158
5159 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5160 general form is actually an atomic exchange, and some targets only
5161 support a reduced form with the second argument being a constant 1.
5162 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5163 the results. */
5164
5165 static rtx
5166 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5167 rtx target)
5168 {
5169 rtx val, mem;
5170
5171 /* Expand the operands. */
5172 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5173 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5174
5175 return expand_sync_lock_test_and_set (target, mem, val);
5176 }
5177
5178 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5179
5180 static void
5181 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5182 {
5183 rtx mem;
5184
5185 /* Expand the operands. */
5186 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5187
5188 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5189 }
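
/* Together, the two builtins above implement the classic spin-lock idiom;
   a minimal sketch (LOCK is a hypothetical flag variable):

     static int lock;

     void acquire (void)
     {
       while (__sync_lock_test_and_set (&lock, 1))
         continue;			spin until the previous value was 0
     }

     void release (void)
     {
       __sync_lock_release (&lock);	atomic store of 0, release ordering
     }
*/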
5190
5191 /* Given an integer representing an ``enum memmodel'', verify its
5192 correctness and return the memory model enum. */
5193
5194 static enum memmodel
5195 get_memmodel (tree exp)
5196 {
5197 rtx op;
5198 unsigned HOST_WIDE_INT val;
5199
/* If the parameter is not a constant, it's a run-time value, so just
   convert it to MEMMODEL_SEQ_CST to avoid the need for run-time checking.  */
5202 if (TREE_CODE (exp) != INTEGER_CST)
5203 return MEMMODEL_SEQ_CST;
5204
5205 op = expand_normal (exp);
5206
5207 val = INTVAL (op);
5208 if (targetm.memmodel_check)
5209 val = targetm.memmodel_check (val);
5210 else if (val & ~MEMMODEL_MASK)
5211 {
warning (OPT_Winvalid_memory_model,
         "unknown architecture specifier in memory model to builtin");
5214 return MEMMODEL_SEQ_CST;
5215 }
5216
5217 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5218 {
5219 warning (OPT_Winvalid_memory_model,
5220 "invalid memory model argument to builtin");
5221 return MEMMODEL_SEQ_CST;
5222 }
5223
5224 return (enum memmodel) val;
5225 }
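
/* For example, a call such as

     __atomic_load_n (p, __ATOMIC_ACQUIRE);

   arrives here with EXP being the INTEGER_CST 2 (the value of
   __ATOMIC_ACQUIRE), whereas a model computed at run time simply degrades
   to MEMMODEL_SEQ_CST above.  */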
5226
5227 /* Expand the __atomic_exchange intrinsic:
5228 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5229 EXP is the CALL_EXPR.
5230 TARGET is an optional place for us to store the results. */
5231
5232 static rtx
5233 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5234 {
5235 rtx val, mem;
5236 enum memmodel model;
5237
5238 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5239 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5240 {
5241 error ("invalid memory model for %<__atomic_exchange%>");
5242 return NULL_RTX;
5243 }
5244
5245 if (!flag_inline_atomics)
5246 return NULL_RTX;
5247
5248 /* Expand the operands. */
5249 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5250 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5251
5252 return expand_atomic_exchange (target, mem, val, model);
5253 }
5254
5255 /* Expand the __atomic_compare_exchange intrinsic:
5256 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5257 TYPE desired, BOOL weak,
5258 enum memmodel success,
5259 enum memmodel failure)
5260 EXP is the CALL_EXPR.
5261 TARGET is an optional place for us to store the results. */
5262
5263 static rtx
5264 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5265 rtx target)
5266 {
5267 rtx expect, desired, mem, oldval;
5268 rtx_code_label *label;
5269 enum memmodel success, failure;
5270 tree weak;
5271 bool is_weak;
5272
5273 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5274 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5275
5276 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5277 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5278 {
5279 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5280 return NULL_RTX;
5281 }
5282
5283 if (failure > success)
5284 {
5285 error ("failure memory model cannot be stronger than success "
5286 "memory model for %<__atomic_compare_exchange%>");
5287 return NULL_RTX;
5288 }
5289
5290 if (!flag_inline_atomics)
5291 return NULL_RTX;
5292
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5297 expect = convert_memory_address (Pmode, expect);
5298 expect = gen_rtx_MEM (mode, expect);
5299 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5300
5301 weak = CALL_EXPR_ARG (exp, 3);
5302 is_weak = false;
5303 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5304 is_weak = true;
5305
5306 if (target == const0_rtx)
5307 target = NULL;
5308
/* Lest the rtl backend create a race condition with an improper store
   to memory, always create a new pseudo for OLDVAL.  */
5311 oldval = NULL;
5312
5313 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5314 is_weak, success, failure))
5315 return NULL_RTX;
5316
5317 /* Conditionally store back to EXPECT, lest we create a race condition
5318 with an improper store to memory. */
5319 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5320 the normal case where EXPECT is totally private, i.e. a register. At
5321 which point the store can be unconditional. */
5322 label = gen_label_rtx ();
5323 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5324 emit_move_insn (expect, oldval);
5325 emit_label (label);
5326
5327 return target;
5328 }
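
/* A user-level sketch of the strong form (values are illustrative; the
   fourth argument is the WEAK flag):

     int v = 0, expect = 0;
     int ok = __atomic_compare_exchange_n (&v, &expect, 1, 0,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);

   On success ok != 0 and v == 1; on failure EXPECT receives the old value,
   which is exactly the conditional store-back emitted above.  */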
5329
5330 /* Expand the __atomic_load intrinsic:
5331 TYPE __atomic_load (TYPE *object, enum memmodel)
5332 EXP is the CALL_EXPR.
5333 TARGET is an optional place for us to store the results. */
5334
5335 static rtx
5336 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5337 {
5338 rtx mem;
5339 enum memmodel model;
5340
5341 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5342 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5343 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5344 {
5345 error ("invalid memory model for %<__atomic_load%>");
5346 return NULL_RTX;
5347 }
5348
5349 if (!flag_inline_atomics)
5350 return NULL_RTX;
5351
5352 /* Expand the operand. */
5353 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5354
5355 return expand_atomic_load (target, mem, model);
5356 }
5357
5358
5359 /* Expand the __atomic_store intrinsic:
5360 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5361 EXP is the CALL_EXPR.
5362 TARGET is an optional place for us to store the results. */
5363
5364 static rtx
5365 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5366 {
5367 rtx mem, val;
5368 enum memmodel model;
5369
5370 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5371 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5372 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5373 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5374 {
5375 error ("invalid memory model for %<__atomic_store%>");
5376 return NULL_RTX;
5377 }
5378
5379 if (!flag_inline_atomics)
5380 return NULL_RTX;
5381
5382 /* Expand the operands. */
5383 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5384 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5385
5386 return expand_atomic_store (mem, val, model, false);
5387 }
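
/* For example, the C11-style sequence

     __atomic_store_n (&x, 42, __ATOMIC_RELEASE);
     y = __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   is expanded by the two routines above; an acquire model on the store or
   a release model on the load is rejected with the errors shown.  */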
5388
5389 /* Expand the __atomic_fetch_XXX intrinsic:
5390 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5391 EXP is the CALL_EXPR.
5392 TARGET is an optional place for us to store the results.
CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
FETCH_AFTER is true if we return the result of the operation, and false
if we return the value before the operation.
5396 IGNORE is true if the result is not used.
5397 EXT_CALL is the correct builtin for an external call if this cannot be
5398 resolved to an instruction sequence. */
5399
5400 static rtx
5401 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5402 enum rtx_code code, bool fetch_after,
5403 bool ignore, enum built_in_function ext_call)
5404 {
5405 rtx val, mem, ret;
5406 enum memmodel model;
5407 tree fndecl;
5408 tree addr;
5409
5410 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5411
5412 /* Expand the operands. */
5413 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5414 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5415
5416 /* Only try generating instructions if inlining is turned on. */
5417 if (flag_inline_atomics)
5418 {
5419 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5420 if (ret)
5421 return ret;
5422 }
5423
5424 /* Return if a different routine isn't needed for the library call. */
5425 if (ext_call == BUILT_IN_NONE)
5426 return NULL_RTX;
5427
5428 /* Change the call to the specified function. */
5429 fndecl = get_callee_fndecl (exp);
5430 addr = CALL_EXPR_FN (exp);
5431 STRIP_NOPS (addr);
5432
5433 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5434 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5435
5436 /* Expand the call here so we can emit trailing code. */
5437 ret = expand_call (exp, target, ignore);
5438
5439 /* Replace the original function just in case it matters. */
5440 TREE_OPERAND (addr, 0) = fndecl;
5441
5442 /* Then issue the arithmetic correction to return the right result. */
5443 if (!ignore)
5444 {
5445 if (code == NOT)
5446 {
5447 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5448 OPTAB_LIB_WIDEN);
5449 ret = expand_simple_unop (mode, NOT, ret, target, true);
5450 }
5451 else
5452 ret = expand_simple_binop (mode, code, ret, val, target, true,
5453 OPTAB_LIB_WIDEN);
5454 }
5455 return ret;
5456 }
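
/* A worked example of the trailing correction: if __atomic_add_fetch could
   not be inlined, the library's __atomic_fetch_add is called instead and
   returns the OLD value, so the code above emits RET = RET + VAL to recover
   the post-operation result.  For NAND (CODE == NOT) the correction is the
   two-step ret = ~(ret & val), matching the NAND definition.  */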
5457
5458
5459 #ifndef HAVE_atomic_clear
5460 # define HAVE_atomic_clear 0
5461 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5462 #endif
5463
5464 /* Expand an atomic clear operation.
5465 void _atomic_clear (BOOL *obj, enum memmodel)
5466 EXP is the call expression. */
5467
5468 static rtx
5469 expand_builtin_atomic_clear (tree exp)
5470 {
5471 enum machine_mode mode;
5472 rtx mem, ret;
5473 enum memmodel model;
5474
5475 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5476 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5477 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5478
5479 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5480 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5481 {
error ("invalid memory model for %<__atomic_clear%>");
5483 return const0_rtx;
5484 }
5485
5486 if (HAVE_atomic_clear)
5487 {
5488 emit_insn (gen_atomic_clear (mem, model));
5489 return const0_rtx;
5490 }
5491
/* Try issuing an __atomic_store, allowing a fallback to a
   __sync_lock_release libcall.  The only way this can fail is if the bool
   type is larger than a word size.  Unlikely, but handle it anyway for
   completeness.  Assume a single-threaded model since there is no atomic
   support in this case, and no barriers are required.  */
5497 ret = expand_atomic_store (mem, const0_rtx, model, true);
5498 if (!ret)
5499 emit_move_insn (mem, const0_rtx);
5500 return const0_rtx;
5501 }
5502
5503 /* Expand an atomic test_and_set operation.
5504 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5505 EXP is the call expression. */
5506
5507 static rtx
5508 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5509 {
5510 rtx mem;
5511 enum memmodel model;
5512 enum machine_mode mode;
5513
5514 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5515 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5517
5518 return expand_atomic_test_and_set (target, mem, model);
5519 }
5520
5521
/* Return a node indicating whether the (optional) object ARG1 of size ARG0
   is always lock free on this architecture.  If ARG1 is NULL, use the
   typical alignment for an object of size ARG0.  */
5524
5525 static tree
5526 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5527 {
5528 int size;
5529 enum machine_mode mode;
5530 unsigned int mode_align, type_align;
5531
5532 if (TREE_CODE (arg0) != INTEGER_CST)
5533 return NULL_TREE;
5534
5535 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5536 mode = mode_for_size (size, MODE_INT, 0);
5537 mode_align = GET_MODE_ALIGNMENT (mode);
5538
5539 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5540 type_align = mode_align;
5541 else
5542 {
5543 tree ttype = TREE_TYPE (arg1);
5544
5545 /* This function is usually invoked and folded immediately by the front
5546 end before anything else has a chance to look at it. The pointer
5547 parameter at this point is usually cast to a void *, so check for that
5548 and look past the cast. */
5549 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5550 && VOID_TYPE_P (TREE_TYPE (ttype)))
5551 arg1 = TREE_OPERAND (arg1, 0);
5552
5553 ttype = TREE_TYPE (arg1);
5554 gcc_assert (POINTER_TYPE_P (ttype));
5555
5556 /* Get the underlying type of the object. */
5557 ttype = TREE_TYPE (ttype);
5558 type_align = TYPE_ALIGN (ttype);
5559 }
5560
/* If the object has smaller alignment, the lock free routines cannot
   be used.  */
5563 if (type_align < mode_align)
5564 return boolean_false_node;
5565
5566 /* Check if a compare_and_swap pattern exists for the mode which represents
5567 the required size. The pattern is not allowed to fail, so the existence
5568 of the pattern indicates support is present. */
5569 if (can_compare_and_swap_p (mode, true))
5570 return boolean_true_node;
5571 else
5572 return boolean_false_node;
5573 }
5574
/* Return const1_rtx if the parameters to call EXP represent an object which
   will always generate lock free instructions, const0_rtx otherwise.  The
   first argument represents the size of the object, and the second parameter
   is a pointer to the object itself.  If NULL is passed for the object, the
   result is based on typical alignment for an object of the specified
   size.  */
5581
5582 static rtx
5583 expand_builtin_atomic_always_lock_free (tree exp)
5584 {
5585 tree size;
5586 tree arg0 = CALL_EXPR_ARG (exp, 0);
5587 tree arg1 = CALL_EXPR_ARG (exp, 1);
5588
5589 if (TREE_CODE (arg0) != INTEGER_CST)
5590 {
5591 error ("non-constant argument 1 to __atomic_always_lock_free");
5592 return const0_rtx;
5593 }
5594
5595 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5596 if (size == boolean_true_node)
5597 return const1_rtx;
5598 return const0_rtx;
5599 }
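
/* For example, __atomic_always_lock_free (sizeof (int), 0) folds to 1 on
   targets with a compare-and-swap pattern for SImode, since the null
   object pointer means the typical alignment for the mode is assumed.  */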
5600
/* Return boolean_true_node if it can be determined that the object ARG1 of
   size ARG0 is lock free on this architecture, NULL_TREE otherwise.  */
5603
5604 static tree
5605 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5606 {
5607 if (!flag_inline_atomics)
5608 return NULL_TREE;
5609
5610 /* If it isn't always lock free, don't generate a result. */
5611 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5612 return boolean_true_node;
5613
5614 return NULL_TREE;
5615 }
5616
/* Return const1_rtx if the parameters to call EXP represent an object which
   is known to be lock free on this architecture.  The first argument
   represents the size of the object, and the second parameter is a pointer
   to the object itself.  If NULL is passed for the object, the result is
   based on typical alignment for an object of the specified size.
   Otherwise return NULL_RTX.  */
5623
5624 static rtx
5625 expand_builtin_atomic_is_lock_free (tree exp)
5626 {
5627 tree size;
5628 tree arg0 = CALL_EXPR_ARG (exp, 0);
5629 tree arg1 = CALL_EXPR_ARG (exp, 1);
5630
5631 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5632 {
5633 error ("non-integer argument 1 to __atomic_is_lock_free");
5634 return NULL_RTX;
5635 }
5636
5637 if (!flag_inline_atomics)
5638 return NULL_RTX;
5639
5640 /* If the value is known at compile time, return the RTX for it. */
5641 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5642 if (size == boolean_true_node)
5643 return const1_rtx;
5644
5645 return NULL_RTX;
5646 }
5647
5648 /* Expand the __atomic_thread_fence intrinsic:
5649 void __atomic_thread_fence (enum memmodel)
5650 EXP is the CALL_EXPR. */
5651
5652 static void
5653 expand_builtin_atomic_thread_fence (tree exp)
5654 {
5655 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5656 expand_mem_thread_fence (model);
5657 }
5658
5659 /* Expand the __atomic_signal_fence intrinsic:
5660 void __atomic_signal_fence (enum memmodel)
5661 EXP is the CALL_EXPR. */
5662
5663 static void
5664 expand_builtin_atomic_signal_fence (tree exp)
5665 {
5666 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5667 expand_mem_signal_fence (model);
5668 }
5669
5670 /* Expand the __sync_synchronize intrinsic. */
5671
5672 static void
5673 expand_builtin_sync_synchronize (void)
5674 {
5675 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5676 }
5677
5678 static rtx
5679 expand_builtin_thread_pointer (tree exp, rtx target)
5680 {
5681 enum insn_code icode;
5682 if (!validate_arglist (exp, VOID_TYPE))
5683 return const0_rtx;
5684 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5685 if (icode != CODE_FOR_nothing)
5686 {
5687 struct expand_operand op;
/* If the target is not suitable then create a new target.  */
5689 if (target == NULL_RTX
5690 || !REG_P (target)
5691 || GET_MODE (target) != Pmode)
5692 target = gen_reg_rtx (Pmode);
5693 create_output_operand (&op, target, Pmode);
5694 expand_insn (icode, 1, &op);
5695 return target;
5696 }
error ("%<__builtin_thread_pointer%> is not supported on this target");
5698 return const0_rtx;
5699 }
5700
5701 static void
5702 expand_builtin_set_thread_pointer (tree exp)
5703 {
5704 enum insn_code icode;
5705 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5706 return;
5707 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5708 if (icode != CODE_FOR_nothing)
5709 {
5710 struct expand_operand op;
5711 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5712 Pmode, EXPAND_NORMAL);
5713 create_input_operand (&op, val, Pmode);
5714 expand_insn (icode, 1, &op);
5715 return;
5716 }
error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5718 }
5719
5720 \f
/* Emit code to restore the current value of the stack.  */
5722
5723 static void
5724 expand_stack_restore (tree var)
5725 {
5726 rtx_insn *prev;
5727 rtx sa = expand_normal (var);
5728
5729 sa = convert_memory_address (Pmode, sa);
5730
5731 prev = get_last_insn ();
5732 emit_stack_restore (SAVE_BLOCK, sa);
5733 fixup_args_size_notes (prev, get_last_insn (), 0);
5734 }
5735
5736
/* Emit code to save the current value of the stack.  */
5738
5739 static rtx
5740 expand_stack_save (void)
5741 {
5742 rtx ret = NULL_RTX;
5743
5744 do_pending_stack_adjust ();
5745 emit_stack_save (SAVE_BLOCK, &ret);
5746 return ret;
5747 }
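
/* For example, the gimplifier brackets the lifetime of a variable-length
   array such as

     { char buf[n]; use (buf); }

   with a __builtin_stack_save on entry to the block and a matching
   __builtin_stack_restore on exit (USE is a hypothetical consumer).  */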
5748
5749 /* Expand an expression EXP that calls a built-in function,
5750 with result going to TARGET if that's convenient
5751 (and in mode MODE if that's convenient).
5752 SUBTARGET may be used as the target for computing one of EXP's operands.
5753 IGNORE is nonzero if the value is to be ignored. */
5754
5755 rtx
5756 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5757 int ignore)
5758 {
5759 tree fndecl = get_callee_fndecl (exp);
5760 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5761 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5762 int flags;
5763
5764 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5765 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5766
5767 /* When not optimizing, generate calls to library functions for a certain
5768 set of builtins. */
5769 if (!optimize
5770 && !called_as_built_in (fndecl)
5771 && fcode != BUILT_IN_FORK
5772 && fcode != BUILT_IN_EXECL
5773 && fcode != BUILT_IN_EXECV
5774 && fcode != BUILT_IN_EXECLP
5775 && fcode != BUILT_IN_EXECLE
5776 && fcode != BUILT_IN_EXECVP
5777 && fcode != BUILT_IN_EXECVE
5778 && fcode != BUILT_IN_ALLOCA
5779 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5780 && fcode != BUILT_IN_FREE)
5781 return expand_call (exp, target, ignore);
5782
5783 /* The built-in function expanders test for target == const0_rtx
5784 to determine whether the function's result will be ignored. */
5785 if (ignore)
5786 target = const0_rtx;
5787
5788 /* If the result of a pure or const built-in function is ignored, and
5789 none of its arguments are volatile, we can avoid expanding the
5790 built-in call and just evaluate the arguments for side-effects. */
5791 if (target == const0_rtx
5792 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5793 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5794 {
5795 bool volatilep = false;
5796 tree arg;
5797 call_expr_arg_iterator iter;
5798
5799 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5800 if (TREE_THIS_VOLATILE (arg))
5801 {
5802 volatilep = true;
5803 break;
5804 }
5805
5806 if (! volatilep)
5807 {
5808 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5809 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5810 return const0_rtx;
5811 }
5812 }
5813
5814 switch (fcode)
5815 {
5816 CASE_FLT_FN (BUILT_IN_FABS):
5817 case BUILT_IN_FABSD32:
5818 case BUILT_IN_FABSD64:
5819 case BUILT_IN_FABSD128:
5820 target = expand_builtin_fabs (exp, target, subtarget);
5821 if (target)
5822 return target;
5823 break;
5824
5825 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5826 target = expand_builtin_copysign (exp, target, subtarget);
5827 if (target)
5828 return target;
5829 break;
5830
5831 /* Just do a normal library call if we were unable to fold
5832 the values. */
5833 CASE_FLT_FN (BUILT_IN_CABS):
5834 break;
5835
5836 CASE_FLT_FN (BUILT_IN_EXP):
5837 CASE_FLT_FN (BUILT_IN_EXP10):
5838 CASE_FLT_FN (BUILT_IN_POW10):
5839 CASE_FLT_FN (BUILT_IN_EXP2):
5840 CASE_FLT_FN (BUILT_IN_EXPM1):
5841 CASE_FLT_FN (BUILT_IN_LOGB):
5842 CASE_FLT_FN (BUILT_IN_LOG):
5843 CASE_FLT_FN (BUILT_IN_LOG10):
5844 CASE_FLT_FN (BUILT_IN_LOG2):
5845 CASE_FLT_FN (BUILT_IN_LOG1P):
5846 CASE_FLT_FN (BUILT_IN_TAN):
5847 CASE_FLT_FN (BUILT_IN_ASIN):
5848 CASE_FLT_FN (BUILT_IN_ACOS):
5849 CASE_FLT_FN (BUILT_IN_ATAN):
5850 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5851 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5852 because of possible accuracy problems. */
5853 if (! flag_unsafe_math_optimizations)
5854 break;
5855 CASE_FLT_FN (BUILT_IN_SQRT):
5856 CASE_FLT_FN (BUILT_IN_FLOOR):
5857 CASE_FLT_FN (BUILT_IN_CEIL):
5858 CASE_FLT_FN (BUILT_IN_TRUNC):
5859 CASE_FLT_FN (BUILT_IN_ROUND):
5860 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5861 CASE_FLT_FN (BUILT_IN_RINT):
5862 target = expand_builtin_mathfn (exp, target, subtarget);
5863 if (target)
5864 return target;
5865 break;
5866
5867 CASE_FLT_FN (BUILT_IN_FMA):
5868 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5869 if (target)
5870 return target;
5871 break;
5872
5873 CASE_FLT_FN (BUILT_IN_ILOGB):
5874 if (! flag_unsafe_math_optimizations)
5875 break;
5876 CASE_FLT_FN (BUILT_IN_ISINF):
5877 CASE_FLT_FN (BUILT_IN_FINITE):
5878 case BUILT_IN_ISFINITE:
5879 case BUILT_IN_ISNORMAL:
5880 target = expand_builtin_interclass_mathfn (exp, target);
5881 if (target)
5882 return target;
5883 break;
5884
5885 CASE_FLT_FN (BUILT_IN_ICEIL):
5886 CASE_FLT_FN (BUILT_IN_LCEIL):
5887 CASE_FLT_FN (BUILT_IN_LLCEIL):
5888 CASE_FLT_FN (BUILT_IN_LFLOOR):
5889 CASE_FLT_FN (BUILT_IN_IFLOOR):
5890 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5891 target = expand_builtin_int_roundingfn (exp, target);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_IRINT):
5897 CASE_FLT_FN (BUILT_IN_LRINT):
5898 CASE_FLT_FN (BUILT_IN_LLRINT):
5899 CASE_FLT_FN (BUILT_IN_IROUND):
5900 CASE_FLT_FN (BUILT_IN_LROUND):
5901 CASE_FLT_FN (BUILT_IN_LLROUND):
5902 target = expand_builtin_int_roundingfn_2 (exp, target);
5903 if (target)
5904 return target;
5905 break;
5906
5907 CASE_FLT_FN (BUILT_IN_POWI):
5908 target = expand_builtin_powi (exp, target);
5909 if (target)
5910 return target;
5911 break;
5912
5913 CASE_FLT_FN (BUILT_IN_ATAN2):
5914 CASE_FLT_FN (BUILT_IN_LDEXP):
5915 CASE_FLT_FN (BUILT_IN_SCALB):
5916 CASE_FLT_FN (BUILT_IN_SCALBN):
5917 CASE_FLT_FN (BUILT_IN_SCALBLN):
5918 if (! flag_unsafe_math_optimizations)
5919 break;
5920
5921 CASE_FLT_FN (BUILT_IN_FMOD):
5922 CASE_FLT_FN (BUILT_IN_REMAINDER):
5923 CASE_FLT_FN (BUILT_IN_DREM):
5924 CASE_FLT_FN (BUILT_IN_POW):
5925 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5926 if (target)
5927 return target;
5928 break;
5929
5930 CASE_FLT_FN (BUILT_IN_CEXPI):
5931 target = expand_builtin_cexpi (exp, target);
5932 gcc_assert (target);
5933 return target;
5934
5935 CASE_FLT_FN (BUILT_IN_SIN):
5936 CASE_FLT_FN (BUILT_IN_COS):
5937 if (! flag_unsafe_math_optimizations)
5938 break;
5939 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5940 if (target)
5941 return target;
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_SINCOS):
5945 if (! flag_unsafe_math_optimizations)
5946 break;
5947 target = expand_builtin_sincos (exp);
5948 if (target)
5949 return target;
5950 break;
5951
5952 case BUILT_IN_APPLY_ARGS:
5953 return expand_builtin_apply_args ();
5954
/* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
   FUNCTION with a copy of the parameters described by
   ARGUMENTS and ARGSIZE.  It returns a block of memory
   allocated on the stack into which are stored all the registers
   that might possibly be used for returning the result of a
   function.  ARGUMENTS is the value returned by
   __builtin_apply_args.  ARGSIZE is the number of bytes of
   arguments that must be copied.  ??? How should this value be
   computed?  We'll also need a safe worst case value for varargs
   functions.  See the sketch after the BUILT_IN_RETURN case below.  */
5965 case BUILT_IN_APPLY:
5966 if (!validate_arglist (exp, POINTER_TYPE,
5967 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5968 && !validate_arglist (exp, REFERENCE_TYPE,
5969 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5970 return const0_rtx;
5971 else
5972 {
5973 rtx ops[3];
5974
5975 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5976 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5977 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5978
5979 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5980 }
5981
/* __builtin_return (RESULT) causes the function to return the
   value described by RESULT.  RESULT is the address of the block of
   memory returned by __builtin_apply.  */
5985 case BUILT_IN_RETURN:
5986 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5987 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5988 return const0_rtx;
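
/* A classic use of the apply/return pair is a transparent forwarding
   wrapper; a minimal sketch (TARGET_FN and the 64-byte argument size are
   hypothetical):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (result);
*/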
5989
5990 case BUILT_IN_SAVEREGS:
5991 return expand_builtin_saveregs ();
5992
5993 case BUILT_IN_VA_ARG_PACK:
5994 /* All valid uses of __builtin_va_arg_pack () are removed during
5995 inlining. */
5996 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5997 return const0_rtx;
5998
5999 case BUILT_IN_VA_ARG_PACK_LEN:
6000 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6001 inlining. */
6002 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6003 return const0_rtx;
6004
6005 /* Return the address of the first anonymous stack arg. */
6006 case BUILT_IN_NEXT_ARG:
6007 if (fold_builtin_next_arg (exp, false))
6008 return const0_rtx;
6009 return expand_builtin_next_arg ();
6010
6011 case BUILT_IN_CLEAR_CACHE:
6012 target = expand_builtin___clear_cache (exp);
6013 if (target)
6014 return target;
6015 break;
6016
6017 case BUILT_IN_CLASSIFY_TYPE:
6018 return expand_builtin_classify_type (exp);
6019
6020 case BUILT_IN_CONSTANT_P:
6021 return const0_rtx;
6022
6023 case BUILT_IN_FRAME_ADDRESS:
6024 case BUILT_IN_RETURN_ADDRESS:
6025 return expand_builtin_frame_address (fndecl, exp);
6026
/* Returns the address of the area where the structure value is
   returned, or 0 if there is none.  */
6029 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6030 if (call_expr_nargs (exp) != 0
6031 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6032 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6033 return const0_rtx;
6034 else
6035 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6036
6037 case BUILT_IN_ALLOCA:
6038 case BUILT_IN_ALLOCA_WITH_ALIGN:
6039 /* If the allocation stems from the declaration of a variable-sized
6040 object, it cannot accumulate. */
6041 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6042 if (target)
6043 return target;
6044 break;
6045
6046 case BUILT_IN_STACK_SAVE:
6047 return expand_stack_save ();
6048
6049 case BUILT_IN_STACK_RESTORE:
6050 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6051 return const0_rtx;
6052
6053 case BUILT_IN_BSWAP16:
6054 case BUILT_IN_BSWAP32:
6055 case BUILT_IN_BSWAP64:
6056 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6057 if (target)
6058 return target;
6059 break;
6060
6061 CASE_INT_FN (BUILT_IN_FFS):
6062 target = expand_builtin_unop (target_mode, exp, target,
6063 subtarget, ffs_optab);
6064 if (target)
6065 return target;
6066 break;
6067
6068 CASE_INT_FN (BUILT_IN_CLZ):
6069 target = expand_builtin_unop (target_mode, exp, target,
6070 subtarget, clz_optab);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_INT_FN (BUILT_IN_CTZ):
6076 target = expand_builtin_unop (target_mode, exp, target,
6077 subtarget, ctz_optab);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_INT_FN (BUILT_IN_CLRSB):
6083 target = expand_builtin_unop (target_mode, exp, target,
6084 subtarget, clrsb_optab);
6085 if (target)
6086 return target;
6087 break;
6088
6089 CASE_INT_FN (BUILT_IN_POPCOUNT):
6090 target = expand_builtin_unop (target_mode, exp, target,
6091 subtarget, popcount_optab);
6092 if (target)
6093 return target;
6094 break;
6095
6096 CASE_INT_FN (BUILT_IN_PARITY):
6097 target = expand_builtin_unop (target_mode, exp, target,
6098 subtarget, parity_optab);
6099 if (target)
6100 return target;
6101 break;
6102
6103 case BUILT_IN_STRLEN:
6104 target = expand_builtin_strlen (exp, target, target_mode);
6105 if (target)
6106 return target;
6107 break;
6108
6109 case BUILT_IN_STRCPY:
6110 target = expand_builtin_strcpy (exp, target);
6111 if (target)
6112 return target;
6113 break;
6114
6115 case BUILT_IN_STRNCPY:
6116 target = expand_builtin_strncpy (exp, target);
6117 if (target)
6118 return target;
6119 break;
6120
6121 case BUILT_IN_STPCPY:
6122 target = expand_builtin_stpcpy (exp, target, mode);
6123 if (target)
6124 return target;
6125 break;
6126
6127 case BUILT_IN_MEMCPY:
6128 target = expand_builtin_memcpy (exp, target);
6129 if (target)
6130 return target;
6131 break;
6132
6133 case BUILT_IN_MEMPCPY:
6134 target = expand_builtin_mempcpy (exp, target, mode);
6135 if (target)
6136 return target;
6137 break;
6138
6139 case BUILT_IN_MEMSET:
6140 target = expand_builtin_memset (exp, target, mode);
6141 if (target)
6142 return target;
6143 break;
6144
6145 case BUILT_IN_BZERO:
6146 target = expand_builtin_bzero (exp);
6147 if (target)
6148 return target;
6149 break;
6150
6151 case BUILT_IN_STRCMP:
6152 target = expand_builtin_strcmp (exp, target);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_STRNCMP:
6158 target = expand_builtin_strncmp (exp, target, mode);
6159 if (target)
6160 return target;
6161 break;
6162
6163 case BUILT_IN_BCMP:
6164 case BUILT_IN_MEMCMP:
6165 target = expand_builtin_memcmp (exp, target, mode);
6166 if (target)
6167 return target;
6168 break;
6169
6170 case BUILT_IN_SETJMP:
6171 /* This should have been lowered to the builtins below. */
6172 gcc_unreachable ();
6173
6174 case BUILT_IN_SETJMP_SETUP:
6175 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6176 and the receiver label. */
6177 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6178 {
6179 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6180 VOIDmode, EXPAND_NORMAL);
6181 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6182 rtx label_r = label_rtx (label);
6183
6184 /* This is copied from the handling of non-local gotos. */
6185 expand_builtin_setjmp_setup (buf_addr, label_r);
6186 nonlocal_goto_handler_labels
6187 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6188 nonlocal_goto_handler_labels);
6189 /* ??? Do not let expand_label treat us as such since we would
6190 not want to be both on the list of non-local labels and on
6191 the list of forced labels. */
6192 FORCED_LABEL (label) = 0;
6193 return const0_rtx;
6194 }
6195 break;
6196
6197 case BUILT_IN_SETJMP_RECEIVER:
6198 /* __builtin_setjmp_receiver is passed the receiver label. */
6199 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6200 {
6201 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6202 rtx label_r = label_rtx (label);
6203
6204 expand_builtin_setjmp_receiver (label_r);
6205 return const0_rtx;
6206 }
6207 break;
6208
6209 /* __builtin_longjmp is passed a pointer to an array of five words.
6210 It's similar to the C library longjmp function but works with
6211 __builtin_setjmp above. */
6212 case BUILT_IN_LONGJMP:
6213 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6214 {
6215 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6216 VOIDmode, EXPAND_NORMAL);
6217 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6218
6219 if (value != const1_rtx)
6220 {
6221 error ("%<__builtin_longjmp%> second argument must be 1");
6222 return const0_rtx;
6223 }
6224
6225 expand_builtin_longjmp (buf_addr, value);
6226 return const0_rtx;
6227 }
6228 break;
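
/* A minimal sketch of the user-visible pair handled here and above:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);	second argument must be 1

   __builtin_setjmp itself never reaches this function; it is lowered to
   the _SETUP and _RECEIVER forms handled above.  */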
6229
6230 case BUILT_IN_NONLOCAL_GOTO:
6231 target = expand_builtin_nonlocal_goto (exp);
6232 if (target)
6233 return target;
6234 break;
6235
6236 /* This updates the setjmp buffer that is its argument with the value
6237 of the current stack pointer. */
6238 case BUILT_IN_UPDATE_SETJMP_BUF:
6239 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6240 {
6241 rtx buf_addr
6242 = expand_normal (CALL_EXPR_ARG (exp, 0));
6243
6244 expand_builtin_update_setjmp_buf (buf_addr);
6245 return const0_rtx;
6246 }
6247 break;
6248
6249 case BUILT_IN_TRAP:
6250 expand_builtin_trap ();
6251 return const0_rtx;
6252
6253 case BUILT_IN_UNREACHABLE:
6254 expand_builtin_unreachable ();
6255 return const0_rtx;
6256
6257 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6258 case BUILT_IN_SIGNBITD32:
6259 case BUILT_IN_SIGNBITD64:
6260 case BUILT_IN_SIGNBITD128:
6261 target = expand_builtin_signbit (exp, target);
6262 if (target)
6263 return target;
6264 break;
6265
6266 /* Various hooks for the DWARF 2 __throw routine. */
6267 case BUILT_IN_UNWIND_INIT:
6268 expand_builtin_unwind_init ();
6269 return const0_rtx;
6270 case BUILT_IN_DWARF_CFA:
6271 return virtual_cfa_rtx;
6272 #ifdef DWARF2_UNWIND_INFO
6273 case BUILT_IN_DWARF_SP_COLUMN:
6274 return expand_builtin_dwarf_sp_column ();
6275 case BUILT_IN_INIT_DWARF_REG_SIZES:
6276 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6277 return const0_rtx;
6278 #endif
6279 case BUILT_IN_FROB_RETURN_ADDR:
6280 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6281 case BUILT_IN_EXTRACT_RETURN_ADDR:
6282 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6283 case BUILT_IN_EH_RETURN:
6284 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6285 CALL_EXPR_ARG (exp, 1));
6286 return const0_rtx;
6287 #ifdef EH_RETURN_DATA_REGNO
6288 case BUILT_IN_EH_RETURN_DATA_REGNO:
6289 return expand_builtin_eh_return_data_regno (exp);
6290 #endif
6291 case BUILT_IN_EXTEND_POINTER:
6292 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6293 case BUILT_IN_EH_POINTER:
6294 return expand_builtin_eh_pointer (exp);
6295 case BUILT_IN_EH_FILTER:
6296 return expand_builtin_eh_filter (exp);
6297 case BUILT_IN_EH_COPY_VALUES:
6298 return expand_builtin_eh_copy_values (exp);
6299
6300 case BUILT_IN_VA_START:
6301 return expand_builtin_va_start (exp);
6302 case BUILT_IN_VA_END:
6303 return expand_builtin_va_end (exp);
6304 case BUILT_IN_VA_COPY:
6305 return expand_builtin_va_copy (exp);
6306 case BUILT_IN_EXPECT:
6307 return expand_builtin_expect (exp, target);
6308 case BUILT_IN_ASSUME_ALIGNED:
6309 return expand_builtin_assume_aligned (exp, target);
6310 case BUILT_IN_PREFETCH:
6311 expand_builtin_prefetch (exp);
6312 return const0_rtx;
6313
6314 case BUILT_IN_INIT_TRAMPOLINE:
6315 return expand_builtin_init_trampoline (exp, true);
6316 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6317 return expand_builtin_init_trampoline (exp, false);
6318 case BUILT_IN_ADJUST_TRAMPOLINE:
6319 return expand_builtin_adjust_trampoline (exp);
6320
6321 case BUILT_IN_FORK:
6322 case BUILT_IN_EXECL:
6323 case BUILT_IN_EXECV:
6324 case BUILT_IN_EXECLP:
6325 case BUILT_IN_EXECLE:
6326 case BUILT_IN_EXECVP:
6327 case BUILT_IN_EXECVE:
6328 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6329 if (target)
6330 return target;
6331 break;
6332
6333 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6334 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6335 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6336 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6337 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6338 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6339 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6340 if (target)
6341 return target;
6342 break;
6343
6344 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6345 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6346 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6347 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6348 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6349 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6350 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6351 if (target)
6352 return target;
6353 break;
6354
6355 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6356 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6357 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6358 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6359 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6360 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6361 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6362 if (target)
6363 return target;
6364 break;
6365
6366 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6367 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6368 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6369 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6370 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6371 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6372 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6373 if (target)
6374 return target;
6375 break;
6376
6377 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6378 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6379 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6380 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6381 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6383 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6389 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6390 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6391 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6392 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6394 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6395 if (target)
6396 return target;
6397 break;
6398
6399 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6400 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6401 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6402 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6403 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6405 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6406 if (target)
6407 return target;
6408 break;
6409
6410 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6411 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6412 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6413 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6414 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6416 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6417 if (target)
6418 return target;
6419 break;
6420
6421 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6422 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6423 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6424 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6425 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6427 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6428 if (target)
6429 return target;
6430 break;
6431
6432 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6433 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6434 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6435 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6436 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6438 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6439 if (target)
6440 return target;
6441 break;
6442
6443 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6444 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6445 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6446 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6447 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6449 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6455 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6456 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6457 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6458 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6460 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6461 if (target)
6462 return target;
6463 break;
6464
6465 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6466 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6467 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6468 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6469 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6470 if (mode == VOIDmode)
6471 mode = TYPE_MODE (boolean_type_node);
6472 if (!target || !register_operand (target, mode))
6473 target = gen_reg_rtx (mode);
6474
6475 mode = get_builtin_sync_mode
6476 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6477 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6483 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6484 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6485 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6486 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6487 mode = get_builtin_sync_mode
6488 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6489 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6495 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6496 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6497 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6498 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6500 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6506 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6507 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6508 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6509 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6511 expand_builtin_sync_lock_release (mode, exp);
6512 return const0_rtx;
6513
6514 case BUILT_IN_SYNC_SYNCHRONIZE:
6515 expand_builtin_sync_synchronize ();
6516 return const0_rtx;
6517
6518 case BUILT_IN_ATOMIC_EXCHANGE_1:
6519 case BUILT_IN_ATOMIC_EXCHANGE_2:
6520 case BUILT_IN_ATOMIC_EXCHANGE_4:
6521 case BUILT_IN_ATOMIC_EXCHANGE_8:
6522 case BUILT_IN_ATOMIC_EXCHANGE_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6524 target = expand_builtin_atomic_exchange (mode, exp, target);
6525 if (target)
6526 return target;
6527 break;
6528
6529 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6530 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6531 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6532 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6533 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6534 {
6535 unsigned int nargs, z;
6536 vec<tree, va_gc> *vec;
6537
6538 mode =
6539 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6540 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6541 if (target)
6542 return target;
6543
6544 /* If this is turned into an external library call, the weak parameter
6545 must be dropped to match the expected parameter list. */
6546 nargs = call_expr_nargs (exp);
6547 vec_alloc (vec, nargs - 1);
6548 for (z = 0; z < 3; z++)
6549 vec->quick_push (CALL_EXPR_ARG (exp, z));
6550 /* Skip the boolean weak parameter. */
6551 for (z = 4; z < 6; z++)
6552 vec->quick_push (CALL_EXPR_ARG (exp, z));
6553 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6554 break;
6555 }
6556
6557 case BUILT_IN_ATOMIC_LOAD_1:
6558 case BUILT_IN_ATOMIC_LOAD_2:
6559 case BUILT_IN_ATOMIC_LOAD_4:
6560 case BUILT_IN_ATOMIC_LOAD_8:
6561 case BUILT_IN_ATOMIC_LOAD_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6563 target = expand_builtin_atomic_load (mode, exp, target);
6564 if (target)
6565 return target;
6566 break;
6567
6568 case BUILT_IN_ATOMIC_STORE_1:
6569 case BUILT_IN_ATOMIC_STORE_2:
6570 case BUILT_IN_ATOMIC_STORE_4:
6571 case BUILT_IN_ATOMIC_STORE_8:
6572 case BUILT_IN_ATOMIC_STORE_16:
6573 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6574 target = expand_builtin_atomic_store (mode, exp);
6575 if (target)
6576 return const0_rtx;
6577 break;
6578
6579 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6580 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6581 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6582 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6583 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6584 {
6585 enum built_in_function lib;
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6587 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6588 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6589 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6590 ignore, lib);
6591 if (target)
6592 return target;
6593 break;
6594 }
6595 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6596 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6597 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6598 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6599 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6600 {
6601 enum built_in_function lib;
6602 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6603 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6604 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6605 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6606 ignore, lib);
6607 if (target)
6608 return target;
6609 break;
6610 }
6611 case BUILT_IN_ATOMIC_AND_FETCH_1:
6612 case BUILT_IN_ATOMIC_AND_FETCH_2:
6613 case BUILT_IN_ATOMIC_AND_FETCH_4:
6614 case BUILT_IN_ATOMIC_AND_FETCH_8:
6615 case BUILT_IN_ATOMIC_AND_FETCH_16:
6616 {
6617 enum built_in_function lib;
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6619 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6620 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6621 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6622 ignore, lib);
6623 if (target)
6624 return target;
6625 break;
6626 }
6627 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6628 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6629 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6630 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6631 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6632 {
6633 enum built_in_function lib;
6634 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6635 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6636 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6637 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6638 ignore, lib);
6639 if (target)
6640 return target;
6641 break;
6642 }
6643 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6644 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6645 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6646 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6647 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6648 {
6649 enum built_in_function lib;
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6651 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6652 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6653 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6654 ignore, lib);
6655 if (target)
6656 return target;
6657 break;
6658 }
6659 case BUILT_IN_ATOMIC_OR_FETCH_1:
6660 case BUILT_IN_ATOMIC_OR_FETCH_2:
6661 case BUILT_IN_ATOMIC_OR_FETCH_4:
6662 case BUILT_IN_ATOMIC_OR_FETCH_8:
6663 case BUILT_IN_ATOMIC_OR_FETCH_16:
6664 {
6665 enum built_in_function lib;
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6667 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6668 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6669 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6670 ignore, lib);
6671 if (target)
6672 return target;
6673 break;
6674 }
6675 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6676 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6677 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6678 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6679 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6680 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6681 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6682 ignore, BUILT_IN_NONE);
6683 if (target)
6684 return target;
6685 break;
6686
6687 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6688 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6689 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6690 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6691 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6693 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6694 ignore, BUILT_IN_NONE);
6695 if (target)
6696 return target;
6697 break;
6698
6699 case BUILT_IN_ATOMIC_FETCH_AND_1:
6700 case BUILT_IN_ATOMIC_FETCH_AND_2:
6701 case BUILT_IN_ATOMIC_FETCH_AND_4:
6702 case BUILT_IN_ATOMIC_FETCH_AND_8:
6703 case BUILT_IN_ATOMIC_FETCH_AND_16:
6704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6705 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6706 ignore, BUILT_IN_NONE);
6707 if (target)
6708 return target;
6709 break;
6710
6711 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6712 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6713 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6714 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6715 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6716 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6717 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6718 ignore, BUILT_IN_NONE);
6719 if (target)
6720 return target;
6721 break;
6722
6723 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6724 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6725 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6726 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6727 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6729 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6730 ignore, BUILT_IN_NONE);
6731 if (target)
6732 return target;
6733 break;
6734
6735 case BUILT_IN_ATOMIC_FETCH_OR_1:
6736 case BUILT_IN_ATOMIC_FETCH_OR_2:
6737 case BUILT_IN_ATOMIC_FETCH_OR_4:
6738 case BUILT_IN_ATOMIC_FETCH_OR_8:
6739 case BUILT_IN_ATOMIC_FETCH_OR_16:
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6741 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6742 ignore, BUILT_IN_NONE);
6743 if (target)
6744 return target;
6745 break;
6746
6747 case BUILT_IN_ATOMIC_TEST_AND_SET:
6748 return expand_builtin_atomic_test_and_set (exp, target);
6749
6750 case BUILT_IN_ATOMIC_CLEAR:
6751 return expand_builtin_atomic_clear (exp);
6752
6753 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6754 return expand_builtin_atomic_always_lock_free (exp);
6755
6756 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6757 target = expand_builtin_atomic_is_lock_free (exp);
6758 if (target)
6759 return target;
6760 break;
6761
6762 case BUILT_IN_ATOMIC_THREAD_FENCE:
6763 expand_builtin_atomic_thread_fence (exp);
6764 return const0_rtx;
6765
6766 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6767 expand_builtin_atomic_signal_fence (exp);
6768 return const0_rtx;
6769
6770 case BUILT_IN_OBJECT_SIZE:
6771 return expand_builtin_object_size (exp);
6772
6773 case BUILT_IN_MEMCPY_CHK:
6774 case BUILT_IN_MEMPCPY_CHK:
6775 case BUILT_IN_MEMMOVE_CHK:
6776 case BUILT_IN_MEMSET_CHK:
6777 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6778 if (target)
6779 return target;
6780 break;
6781
6782 case BUILT_IN_STRCPY_CHK:
6783 case BUILT_IN_STPCPY_CHK:
6784 case BUILT_IN_STRNCPY_CHK:
6785 case BUILT_IN_STPNCPY_CHK:
6786 case BUILT_IN_STRCAT_CHK:
6787 case BUILT_IN_STRNCAT_CHK:
6788 case BUILT_IN_SNPRINTF_CHK:
6789 case BUILT_IN_VSNPRINTF_CHK:
6790 maybe_emit_chk_warning (exp, fcode);
6791 break;
6792
6793 case BUILT_IN_SPRINTF_CHK:
6794 case BUILT_IN_VSPRINTF_CHK:
6795 maybe_emit_sprintf_chk_warning (exp, fcode);
6796 break;
6797
6798 case BUILT_IN_FREE:
6799 if (warn_free_nonheap_object)
6800 maybe_emit_free_warning (exp);
6801 break;
6802
6803 case BUILT_IN_THREAD_POINTER:
6804 return expand_builtin_thread_pointer (exp, target);
6805
6806 case BUILT_IN_SET_THREAD_POINTER:
6807 expand_builtin_set_thread_pointer (exp);
6808 return const0_rtx;
6809
6810 case BUILT_IN_CILK_DETACH:
6811 expand_builtin_cilk_detach (exp);
6812 return const0_rtx;
6813
6814 case BUILT_IN_CILK_POP_FRAME:
6815 expand_builtin_cilk_pop_frame (exp);
6816 return const0_rtx;
6817
6818     default:	/* Just do a library call for an unknown builtin.  */
6819 break;
6820 }
6821
6822 /* The switch statement above can drop through to cause the function
6823 to be called normally. */
6824 return expand_call (exp, target, ignore);
6825 }
6826
6827 /* Determine whether a tree node represents a call to a built-in
6828 function. If the tree T is a call to a built-in function with
6829 the right number of arguments of the appropriate types, return
6830 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6831 Otherwise the return value is END_BUILTINS. */
6832
6833 enum built_in_function
6834 builtin_mathfn_code (const_tree t)
6835 {
6836 const_tree fndecl, arg, parmlist;
6837 const_tree argtype, parmtype;
6838 const_call_expr_arg_iterator iter;
6839
6840 if (TREE_CODE (t) != CALL_EXPR
6841 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6842 return END_BUILTINS;
6843
6844 fndecl = get_callee_fndecl (t);
6845 if (fndecl == NULL_TREE
6846 || TREE_CODE (fndecl) != FUNCTION_DECL
6847 || ! DECL_BUILT_IN (fndecl)
6848 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6849 return END_BUILTINS;
6850
6851 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6852 init_const_call_expr_arg_iterator (t, &iter);
6853 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6854 {
6855 /* If a function doesn't take a variable number of arguments,
6856 the last element in the list will have type `void'. */
6857 parmtype = TREE_VALUE (parmlist);
6858 if (VOID_TYPE_P (parmtype))
6859 {
6860 if (more_const_call_expr_args_p (&iter))
6861 return END_BUILTINS;
6862 return DECL_FUNCTION_CODE (fndecl);
6863 }
6864
6865 if (! more_const_call_expr_args_p (&iter))
6866 return END_BUILTINS;
6867
6868 arg = next_const_call_expr_arg (&iter);
6869 argtype = TREE_TYPE (arg);
6870
6871 if (SCALAR_FLOAT_TYPE_P (parmtype))
6872 {
6873 if (! SCALAR_FLOAT_TYPE_P (argtype))
6874 return END_BUILTINS;
6875 }
6876 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6877 {
6878 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6879 return END_BUILTINS;
6880 }
6881 else if (POINTER_TYPE_P (parmtype))
6882 {
6883 if (! POINTER_TYPE_P (argtype))
6884 return END_BUILTINS;
6885 }
6886 else if (INTEGRAL_TYPE_P (parmtype))
6887 {
6888 if (! INTEGRAL_TYPE_P (argtype))
6889 return END_BUILTINS;
6890 }
6891 else
6892 return END_BUILTINS;
6893 }
6894
6895 /* Variable-length argument list. */
6896 return DECL_FUNCTION_CODE (fndecl);
6897 }
6898
6899 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6900 evaluate to a constant. */
6901
6902 static tree
6903 fold_builtin_constant_p (tree arg)
6904 {
6905 /* We return 1 for a numeric type that's known to be a constant
6906 value at compile-time or for an aggregate type that's a
6907 literal constant. */
6908 STRIP_NOPS (arg);
6909
6910   /* If we know this is a constant, return the constant one.  */
6911 if (CONSTANT_CLASS_P (arg)
6912 || (TREE_CODE (arg) == CONSTRUCTOR
6913 && TREE_CONSTANT (arg)))
6914 return integer_one_node;
6915 if (TREE_CODE (arg) == ADDR_EXPR)
6916 {
6917 tree op = TREE_OPERAND (arg, 0);
6918 if (TREE_CODE (op) == STRING_CST
6919 || (TREE_CODE (op) == ARRAY_REF
6920 && integer_zerop (TREE_OPERAND (op, 1))
6921 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6922 return integer_one_node;
6923 }
6924
6925 /* If this expression has side effects, show we don't know it to be a
6926      constant.  Likewise if it's a pointer or aggregate type, since in
6927      those cases we only want literals, which are only optimized
6928      when generating RTL, not later.
6929 And finally, if we are compiling an initializer, not code, we
6930 need to return a definite result now; there's not going to be any
6931 more optimization done. */
6932 if (TREE_SIDE_EFFECTS (arg)
6933 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6934 || POINTER_TYPE_P (TREE_TYPE (arg))
6935 || cfun == 0
6936 || folding_initializer
6937 || force_folding_builtin_constant_p)
6938 return integer_zero_node;
6939
6940 return NULL_TREE;
6941 }
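
/* Illustrative sketch, not part of the original source: with the rules
   above,

     __builtin_constant_p (3 + 4)   folds to 1 (CONSTANT_CLASS_P),
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST),
     __builtin_constant_p (ptr)     folds to 0 (pointer type), and
     __builtin_constant_p (x + 1)   is left alone for later passes,
                                    unless an initializer is being folded.  */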
6942
6943 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6944 return it as a truthvalue. */
6945
6946 static tree
6947 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6948 tree predictor)
6949 {
6950 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6951
6952 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6953 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6954 ret_type = TREE_TYPE (TREE_TYPE (fn));
6955 pred_type = TREE_VALUE (arg_types);
6956 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6957
6958 pred = fold_convert_loc (loc, pred_type, pred);
6959 expected = fold_convert_loc (loc, expected_type, expected);
6960 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6961 predictor);
6962
6963 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6964 build_int_cst (ret_type, 0));
6965 }
6966
6967 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
6968    Return NULL_TREE if no simplification is possible.  */
6969
6970 tree
6971 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6972 {
6973 tree inner, fndecl, inner_arg0;
6974 enum tree_code code;
6975
6976 /* Distribute the expected value over short-circuiting operators.
6977 See through the cast from truthvalue_type_node to long. */
6978 inner_arg0 = arg0;
6979 while (TREE_CODE (inner_arg0) == NOP_EXPR
6980 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6981 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6982 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6983
6984   /* If this is a builtin_expect within a builtin_expect, keep the
6985      inner one.  See through a comparison against a constant.  It
6986      might have been added to create a truthvalue.  */
6987 inner = inner_arg0;
6988
6989 if (COMPARISON_CLASS_P (inner)
6990 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6991 inner = TREE_OPERAND (inner, 0);
6992
6993 if (TREE_CODE (inner) == CALL_EXPR
6994 && (fndecl = get_callee_fndecl (inner))
6995 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6996 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6997 return arg0;
6998
6999 inner = inner_arg0;
7000 code = TREE_CODE (inner);
7001 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7002 {
7003 tree op0 = TREE_OPERAND (inner, 0);
7004 tree op1 = TREE_OPERAND (inner, 1);
7005
7006 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7007 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7008 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7009
7010 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7011 }
7012
7013 /* If the argument isn't invariant then there's nothing else we can do. */
7014 if (!TREE_CONSTANT (inner_arg0))
7015 return NULL_TREE;
7016
7017 /* If we expect that a comparison against the argument will fold to
7018 a constant return the constant. In practice, this means a true
7019 constant or the address of a non-weak symbol. */
7020 inner = inner_arg0;
7021 STRIP_NOPS (inner);
7022 if (TREE_CODE (inner) == ADDR_EXPR)
7023 {
7024 do
7025 {
7026 inner = TREE_OPERAND (inner, 0);
7027 }
7028 while (TREE_CODE (inner) == COMPONENT_REF
7029 || TREE_CODE (inner) == ARRAY_REF);
7030 if ((TREE_CODE (inner) == VAR_DECL
7031 || TREE_CODE (inner) == FUNCTION_DECL)
7032 && DECL_WEAK (inner))
7033 return NULL_TREE;
7034 }
7035
7036 /* Otherwise, ARG0 already has the proper type for the return value. */
7037 return arg0;
7038 }
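
/* Illustrative sketch, not part of the original source: the
   TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR case above distributes the expected
   value over short-circuit operators, so

     if (__builtin_expect (a && b, 1)) ...

   is folded as if it had been written

     if (__builtin_expect (a, 1) && __builtin_expect (b, 1)) ...

   letting each arm carry its own prediction.  */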
7039
7040 /* Fold a call to __builtin_classify_type with argument ARG. */
7041
7042 static tree
7043 fold_builtin_classify_type (tree arg)
7044 {
7045 if (arg == 0)
7046 return build_int_cst (integer_type_node, no_type_class);
7047
7048 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7049 }
7050
7051 /* Fold a call to __builtin_strlen with argument ARG. */
7052
7053 static tree
7054 fold_builtin_strlen (location_t loc, tree type, tree arg)
7055 {
7056 if (!validate_arg (arg, POINTER_TYPE))
7057 return NULL_TREE;
7058 else
7059 {
7060 tree len = c_strlen (arg, 0);
7061
7062 if (len)
7063 return fold_convert_loc (loc, type, len);
7064
7065 return NULL_TREE;
7066 }
7067 }
7068
7069 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7070
7071 static tree
7072 fold_builtin_inf (location_t loc, tree type, int warn)
7073 {
7074 REAL_VALUE_TYPE real;
7075
7076 /* __builtin_inff is intended to be usable to define INFINITY on all
7077 targets. If an infinity is not available, INFINITY expands "to a
7078 positive constant of type float that overflows at translation
7079 time", footnote "In this case, using INFINITY will violate the
7080 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7081 Thus we pedwarn to ensure this constraint violation is
7082 diagnosed. */
7083 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7084 pedwarn (loc, 0, "target format does not support infinity");
7085
7086 real_inf (&real);
7087 return build_real (type, real);
7088 }
7089
7090 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7091
7092 static tree
7093 fold_builtin_nan (tree arg, tree type, int quiet)
7094 {
7095 REAL_VALUE_TYPE real;
7096 const char *str;
7097
7098 if (!validate_arg (arg, POINTER_TYPE))
7099 return NULL_TREE;
7100 str = c_getstr (arg);
7101 if (!str)
7102 return NULL_TREE;
7103
7104 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7105 return NULL_TREE;
7106
7107 return build_real (type, real);
7108 }
7109
7110 /* Return true if the floating point expression T has an integer value.
7111 We also allow +Inf, -Inf and NaN to be considered integer values. */
7112
7113 static bool
7114 integer_valued_real_p (tree t)
7115 {
7116 switch (TREE_CODE (t))
7117 {
7118 case FLOAT_EXPR:
7119 return true;
7120
7121 case ABS_EXPR:
7122 case SAVE_EXPR:
7123 return integer_valued_real_p (TREE_OPERAND (t, 0));
7124
7125 case COMPOUND_EXPR:
7126 case MODIFY_EXPR:
7127 case BIND_EXPR:
7128 return integer_valued_real_p (TREE_OPERAND (t, 1));
7129
7130 case PLUS_EXPR:
7131 case MINUS_EXPR:
7132 case MULT_EXPR:
7133 case MIN_EXPR:
7134 case MAX_EXPR:
7135 return integer_valued_real_p (TREE_OPERAND (t, 0))
7136 && integer_valued_real_p (TREE_OPERAND (t, 1));
7137
7138 case COND_EXPR:
7139 return integer_valued_real_p (TREE_OPERAND (t, 1))
7140 && integer_valued_real_p (TREE_OPERAND (t, 2));
7141
7142 case REAL_CST:
7143 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7144
7145 case NOP_EXPR:
7146 {
7147 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7148 if (TREE_CODE (type) == INTEGER_TYPE)
7149 return true;
7150 if (TREE_CODE (type) == REAL_TYPE)
7151 return integer_valued_real_p (TREE_OPERAND (t, 0));
7152 break;
7153 }
7154
7155 case CALL_EXPR:
7156 switch (builtin_mathfn_code (t))
7157 {
7158 CASE_FLT_FN (BUILT_IN_CEIL):
7159 CASE_FLT_FN (BUILT_IN_FLOOR):
7160 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7161 CASE_FLT_FN (BUILT_IN_RINT):
7162 CASE_FLT_FN (BUILT_IN_ROUND):
7163 CASE_FLT_FN (BUILT_IN_TRUNC):
7164 return true;
7165
7166 CASE_FLT_FN (BUILT_IN_FMIN):
7167 CASE_FLT_FN (BUILT_IN_FMAX):
7168 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7169 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7170
7171 default:
7172 break;
7173 }
7174 break;
7175
7176 default:
7177 break;
7178 }
7179 return false;
7180 }
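
/* Illustrative sketch, not part of the original source: by the
   recursion above, an expression such as

     floor (x) + trunc (y)

   is recognized as integer valued, since both operands of the
   PLUS_EXPR are calls to integer-rounding builtins.  */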
7181
7182 /* FNDECL is assumed to be a builtin where truncation can be propagated
7183    across (for instance floor((double)f) == (double)floorf (f)).
7184 Do the transformation for a call with argument ARG. */
7185
7186 static tree
7187 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7188 {
7189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7190
7191 if (!validate_arg (arg, REAL_TYPE))
7192 return NULL_TREE;
7193
7194 /* Integer rounding functions are idempotent. */
7195 if (fcode == builtin_mathfn_code (arg))
7196 return arg;
7197
7198 /* If argument is already integer valued, and we don't need to worry
7199 about setting errno, there's no need to perform rounding. */
7200 if (! flag_errno_math && integer_valued_real_p (arg))
7201 return arg;
7202
7203 if (optimize)
7204 {
7205 tree arg0 = strip_float_extensions (arg);
7206 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7207 tree newtype = TREE_TYPE (arg0);
7208 tree decl;
7209
7210 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7211 && (decl = mathfn_built_in (newtype, fcode)))
7212 return fold_convert_loc (loc, ftype,
7213 build_call_expr_loc (loc, decl, 1,
7214 fold_convert_loc (loc,
7215 newtype,
7216 arg0)));
7217 }
7218 return NULL_TREE;
7219 }
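
/* Illustrative sketch, not part of the original source: given
   float f, the narrowing above rewrites

     double d = floor ((double) f);

   as

     double d = (double) floorf (f);

   which is valid because the float-to-double extension is exact, so
   floorf on the original float yields the same value.  */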
7220
7221 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7222 the argument, for instance lround((double)f) -> lroundf (f).
7223 Do the transformation for a call with argument ARG. */
7224
7225 static tree
7226 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7227 {
7228 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7229
7230 if (!validate_arg (arg, REAL_TYPE))
7231 return NULL_TREE;
7232
7233 /* If argument is already integer valued, and we don't need to worry
7234 about setting errno, there's no need to perform rounding. */
7235 if (! flag_errno_math && integer_valued_real_p (arg))
7236 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7237 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7238
7239 if (optimize)
7240 {
7241 tree ftype = TREE_TYPE (arg);
7242 tree arg0 = strip_float_extensions (arg);
7243 tree newtype = TREE_TYPE (arg0);
7244 tree decl;
7245
7246 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7247 && (decl = mathfn_built_in (newtype, fcode)))
7248 return build_call_expr_loc (loc, decl, 1,
7249 fold_convert_loc (loc, newtype, arg0));
7250 }
7251
7252 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7253 sizeof (int) == sizeof (long). */
7254 if (TYPE_PRECISION (integer_type_node)
7255 == TYPE_PRECISION (long_integer_type_node))
7256 {
7257 tree newfn = NULL_TREE;
7258 switch (fcode)
7259 {
7260 CASE_FLT_FN (BUILT_IN_ICEIL):
7261 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7262 break;
7263
7264 CASE_FLT_FN (BUILT_IN_IFLOOR):
7265 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7266 break;
7267
7268 CASE_FLT_FN (BUILT_IN_IROUND):
7269 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7270 break;
7271
7272 CASE_FLT_FN (BUILT_IN_IRINT):
7273 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7274 break;
7275
7276 default:
7277 break;
7278 }
7279
7280 if (newfn)
7281 {
7282 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7283 return fold_convert_loc (loc,
7284 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7285 }
7286 }
7287
7288 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7289 sizeof (long long) == sizeof (long). */
7290 if (TYPE_PRECISION (long_long_integer_type_node)
7291 == TYPE_PRECISION (long_integer_type_node))
7292 {
7293 tree newfn = NULL_TREE;
7294 switch (fcode)
7295 {
7296 CASE_FLT_FN (BUILT_IN_LLCEIL):
7297 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7298 break;
7299
7300 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7301 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7302 break;
7303
7304 CASE_FLT_FN (BUILT_IN_LLROUND):
7305 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7306 break;
7307
7308 CASE_FLT_FN (BUILT_IN_LLRINT):
7309 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7310 break;
7311
7312 default:
7313 break;
7314 }
7315
7316 if (newfn)
7317 {
7318 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7319 return fold_convert_loc (loc,
7320 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7321 }
7322 }
7323
7324 return NULL_TREE;
7325 }
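
/* Illustrative sketch, not part of the original source: on an LP64
   target, where long and long long have the same precision, the
   canonicalization above turns

     long long r = llround (x);

   into the equivalent

     long long r = (long long) lround (x);

   so later passes need only handle the lround family.  */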
7326
7327 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7328 return type. Return NULL_TREE if no simplification can be made. */
7329
7330 static tree
7331 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7332 {
7333 tree res;
7334
7335 if (!validate_arg (arg, COMPLEX_TYPE)
7336 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7337 return NULL_TREE;
7338
7339 /* Calculate the result when the argument is a constant. */
7340 if (TREE_CODE (arg) == COMPLEX_CST
7341 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7342 type, mpfr_hypot)))
7343 return res;
7344
7345 if (TREE_CODE (arg) == COMPLEX_EXPR)
7346 {
7347 tree real = TREE_OPERAND (arg, 0);
7348 tree imag = TREE_OPERAND (arg, 1);
7349
7350 /* If either part is zero, cabs is fabs of the other. */
7351 if (real_zerop (real))
7352 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7353 if (real_zerop (imag))
7354 return fold_build1_loc (loc, ABS_EXPR, type, real);
7355
7356 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7357 if (flag_unsafe_math_optimizations
7358 && operand_equal_p (real, imag, OEP_PURE_SAME))
7359 {
7360 const REAL_VALUE_TYPE sqrt2_trunc
7361 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7362 STRIP_NOPS (real);
7363 return fold_build2_loc (loc, MULT_EXPR, type,
7364 fold_build1_loc (loc, ABS_EXPR, type, real),
7365 build_real (type, sqrt2_trunc));
7366 }
7367 }
7368
7369 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7370 if (TREE_CODE (arg) == NEGATE_EXPR
7371 || TREE_CODE (arg) == CONJ_EXPR)
7372 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7373
7374 /* Don't do this when optimizing for size. */
7375 if (flag_unsafe_math_optimizations
7376 && optimize && optimize_function_for_speed_p (cfun))
7377 {
7378 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7379
7380 if (sqrtfn != NULL_TREE)
7381 {
7382 tree rpart, ipart, result;
7383
7384 arg = builtin_save_expr (arg);
7385
7386 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7387 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7388
7389 rpart = builtin_save_expr (rpart);
7390 ipart = builtin_save_expr (ipart);
7391
7392 result = fold_build2_loc (loc, PLUS_EXPR, type,
7393 fold_build2_loc (loc, MULT_EXPR, type,
7394 rpart, rpart),
7395 fold_build2_loc (loc, MULT_EXPR, type,
7396 ipart, ipart));
7397
7398 return build_call_expr_loc (loc, sqrtfn, 1, result);
7399 }
7400 }
7401
7402 return NULL_TREE;
7403 }
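
/* Illustrative sketch, not part of the original source: with
   -funsafe-math-optimizations, and when optimizing for speed, the
   expansion above open-codes

     double m = cabs (z);

   as roughly

     double r = __real__ z, i = __imag__ z;
     double m = sqrt (r * r + i * i);

   saving a libm call.  */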
7404
7405 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7406 complex tree type of the result. If NEG is true, the imaginary
7407 zero is negative. */
7408
7409 static tree
7410 build_complex_cproj (tree type, bool neg)
7411 {
7412 REAL_VALUE_TYPE rinf, rzero = dconst0;
7413
7414 real_inf (&rinf);
7415 rzero.sign = neg;
7416 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7417 build_real (TREE_TYPE (type), rzero));
7418 }
7419
7420 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7421 return type. Return NULL_TREE if no simplification can be made. */
7422
7423 static tree
7424 fold_builtin_cproj (location_t loc, tree arg, tree type)
7425 {
7426 if (!validate_arg (arg, COMPLEX_TYPE)
7427 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7428 return NULL_TREE;
7429
7430 /* If there are no infinities, return arg. */
7431 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7432 return non_lvalue_loc (loc, arg);
7433
7434 /* Calculate the result when the argument is a constant. */
7435 if (TREE_CODE (arg) == COMPLEX_CST)
7436 {
7437 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7438 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7439
7440 if (real_isinf (real) || real_isinf (imag))
7441 return build_complex_cproj (type, imag->sign);
7442 else
7443 return arg;
7444 }
7445 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7446 {
7447 tree real = TREE_OPERAND (arg, 0);
7448 tree imag = TREE_OPERAND (arg, 1);
7449
7450 STRIP_NOPS (real);
7451 STRIP_NOPS (imag);
7452
7453 /* If the real part is inf and the imag part is known to be
7454 nonnegative, return (inf + 0i). Remember side-effects are
7455 possible in the imag part. */
7456 if (TREE_CODE (real) == REAL_CST
7457 && real_isinf (TREE_REAL_CST_PTR (real))
7458 && tree_expr_nonnegative_p (imag))
7459 return omit_one_operand_loc (loc, type,
7460 build_complex_cproj (type, false),
7461 arg);
7462
7463 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7464 Remember side-effects are possible in the real part. */
7465 if (TREE_CODE (imag) == REAL_CST
7466 && real_isinf (TREE_REAL_CST_PTR (imag)))
7467 return
7468 omit_one_operand_loc (loc, type,
7469 build_complex_cproj (type, TREE_REAL_CST_PTR
7470 (imag)->sign), arg);
7471 }
7472
7473 return NULL_TREE;
7474 }
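
/* Illustrative sketch, not part of the original source: per the C99
   definition implemented above,

     cproj (1.0 + 2.0*I)       ->  1.0 + 2.0*I       (finite, unchanged)
     cproj (INFINITY + 2.0*I)  ->  INFINITY + 0.0*I

   with the sign of the imaginary zero taken from the imaginary part.  */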
7475
7476 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7477 Return NULL_TREE if no simplification can be made. */
7478
7479 static tree
7480 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7481 {
7483 enum built_in_function fcode;
7484 tree res;
7485
7486 if (!validate_arg (arg, REAL_TYPE))
7487 return NULL_TREE;
7488
7489 /* Calculate the result when the argument is a constant. */
7490 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7491 return res;
7492
7493 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7494 fcode = builtin_mathfn_code (arg);
7495 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7496 {
7497 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7498 arg = fold_build2_loc (loc, MULT_EXPR, type,
7499 CALL_EXPR_ARG (arg, 0),
7500 build_real (type, dconsthalf));
7501 return build_call_expr_loc (loc, expfn, 1, arg);
7502 }
7503
7504 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7505 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7506 {
7507 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7508
7509 if (powfn)
7510 {
7511 tree arg0 = CALL_EXPR_ARG (arg, 0);
7512 tree tree_root;
7513 /* The inner root was either sqrt or cbrt. */
7514 /* This was a conditional expression but it triggered a bug
7515 in Sun C 5.5. */
7516 REAL_VALUE_TYPE dconstroot;
7517 if (BUILTIN_SQRT_P (fcode))
7518 dconstroot = dconsthalf;
7519 else
7520 dconstroot = dconst_third ();
7521
7522 /* Adjust for the outer root. */
7523 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7524 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7525 tree_root = build_real (type, dconstroot);
7526 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7527 }
7528 }
7529
7530 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7531 if (flag_unsafe_math_optimizations
7532 && (fcode == BUILT_IN_POW
7533 || fcode == BUILT_IN_POWF
7534 || fcode == BUILT_IN_POWL))
7535 {
7536 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7537 tree arg0 = CALL_EXPR_ARG (arg, 0);
7538 tree arg1 = CALL_EXPR_ARG (arg, 1);
7539 tree narg1;
7540 if (!tree_expr_nonnegative_p (arg0))
7541 arg0 = build1 (ABS_EXPR, type, arg0);
7542 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7543 build_real (type, dconsthalf));
7544 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7545 }
7546
7547 return NULL_TREE;
7548 }
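
/* Illustrative sketch, not part of the original source: under
   -funsafe-math-optimizations the rules above give, for example,

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)  */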
7549
7550 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7551 Return NULL_TREE if no simplification can be made. */
7552
7553 static tree
7554 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7555 {
7556 const enum built_in_function fcode = builtin_mathfn_code (arg);
7557 tree res;
7558
7559 if (!validate_arg (arg, REAL_TYPE))
7560 return NULL_TREE;
7561
7562 /* Calculate the result when the argument is a constant. */
7563 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7564 return res;
7565
7566 if (flag_unsafe_math_optimizations)
7567 {
7568 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7569 if (BUILTIN_EXPONENT_P (fcode))
7570 {
7571 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7572 const REAL_VALUE_TYPE third_trunc =
7573 real_value_truncate (TYPE_MODE (type), dconst_third ());
7574 arg = fold_build2_loc (loc, MULT_EXPR, type,
7575 CALL_EXPR_ARG (arg, 0),
7576 build_real (type, third_trunc));
7577 return build_call_expr_loc (loc, expfn, 1, arg);
7578 }
7579
7580 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7581 if (BUILTIN_SQRT_P (fcode))
7582 {
7583 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7584
7585 if (powfn)
7586 {
7587 tree arg0 = CALL_EXPR_ARG (arg, 0);
7588 tree tree_root;
7589 REAL_VALUE_TYPE dconstroot = dconst_third ();
7590
7591 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7592 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7593 tree_root = build_real (type, dconstroot);
7594 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7595 }
7596 }
7597
7598 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7599 if (BUILTIN_CBRT_P (fcode))
7600 {
7601 tree arg0 = CALL_EXPR_ARG (arg, 0);
7602 if (tree_expr_nonnegative_p (arg0))
7603 {
7604 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7605
7606 if (powfn)
7607 {
7608 tree tree_root;
7609 REAL_VALUE_TYPE dconstroot;
7610
7611 real_arithmetic (&dconstroot, MULT_EXPR,
7612 dconst_third_ptr (), dconst_third_ptr ());
7613 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7614 tree_root = build_real (type, dconstroot);
7615 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7616 }
7617 }
7618 }
7619
7620 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7621 if (fcode == BUILT_IN_POW
7622 || fcode == BUILT_IN_POWF
7623 || fcode == BUILT_IN_POWL)
7624 {
7625 tree arg00 = CALL_EXPR_ARG (arg, 0);
7626 tree arg01 = CALL_EXPR_ARG (arg, 1);
7627 if (tree_expr_nonnegative_p (arg00))
7628 {
7629 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7630 const REAL_VALUE_TYPE dconstroot
7631 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7632 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7633 build_real (type, dconstroot));
7634 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7635 }
7636 }
7637 }
7638 return NULL_TREE;
7639 }
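
/* Illustrative sketch, not part of the original source: the unsafe
   transformations above give, for example,

     cbrt (exp (x))   ->  exp (x / 3.0)
     cbrt (sqrt (x))  ->  pow (x, 1.0/6.0)
     cbrt (cbrt (x))  ->  pow (x, 1.0/9.0)   when x is known nonnegative.  */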
7640
7641 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7642 TYPE is the type of the return value. Return NULL_TREE if no
7643 simplification can be made. */
7644
7645 static tree
7646 fold_builtin_cos (location_t loc,
7647 tree arg, tree type, tree fndecl)
7648 {
7649 tree res, narg;
7650
7651 if (!validate_arg (arg, REAL_TYPE))
7652 return NULL_TREE;
7653
7654 /* Calculate the result when the argument is a constant. */
7655 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7656 return res;
7657
7658 /* Optimize cos(-x) into cos (x). */
7659 if ((narg = fold_strip_sign_ops (arg)))
7660 return build_call_expr_loc (loc, fndecl, 1, narg);
7661
7662 return NULL_TREE;
7663 }
7664
7665 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7666 Return NULL_TREE if no simplification can be made. */
7667
7668 static tree
7669 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7670 {
7671 if (validate_arg (arg, REAL_TYPE))
7672 {
7673 tree res, narg;
7674
7675 /* Calculate the result when the argument is a constant. */
7676 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7677 return res;
7678
7679 /* Optimize cosh(-x) into cosh (x). */
7680 if ((narg = fold_strip_sign_ops (arg)))
7681 return build_call_expr_loc (loc, fndecl, 1, narg);
7682 }
7683
7684 return NULL_TREE;
7685 }
7686
7687 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7688 argument ARG. TYPE is the type of the return value. Return
7689 NULL_TREE if no simplification can be made. */
7690
7691 static tree
7692 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7693 bool hyper)
7694 {
7695 if (validate_arg (arg, COMPLEX_TYPE)
7696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7697 {
7698 tree tmp;
7699
7700 /* Calculate the result when the argument is a constant. */
7701 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7702 return tmp;
7703
7704 /* Optimize fn(-x) into fn(x). */
7705 if ((tmp = fold_strip_sign_ops (arg)))
7706 return build_call_expr_loc (loc, fndecl, 1, tmp);
7707 }
7708
7709 return NULL_TREE;
7710 }
7711
7712 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7713 Return NULL_TREE if no simplification can be made. */
7714
7715 static tree
7716 fold_builtin_tan (tree arg, tree type)
7717 {
7718 enum built_in_function fcode;
7719 tree res;
7720
7721 if (!validate_arg (arg, REAL_TYPE))
7722 return NULL_TREE;
7723
7724 /* Calculate the result when the argument is a constant. */
7725 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7726 return res;
7727
7728 /* Optimize tan(atan(x)) = x. */
7729 fcode = builtin_mathfn_code (arg);
7730 if (flag_unsafe_math_optimizations
7731 && (fcode == BUILT_IN_ATAN
7732 || fcode == BUILT_IN_ATANF
7733 || fcode == BUILT_IN_ATANL))
7734 return CALL_EXPR_ARG (arg, 0);
7735
7736 return NULL_TREE;
7737 }
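
/* Illustrative sketch, not part of the original source: with
   -funsafe-math-optimizations the rule above folds

     tan (atan (x))  ->  x

   which is exact in infinite precision but may differ in rounding.  */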
7738
7739 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7740 NULL_TREE if no simplification can be made. */
7741
7742 static tree
7743 fold_builtin_sincos (location_t loc,
7744 tree arg0, tree arg1, tree arg2)
7745 {
7746 tree type;
7747 tree res, fn, call;
7748
7749 if (!validate_arg (arg0, REAL_TYPE)
7750 || !validate_arg (arg1, POINTER_TYPE)
7751 || !validate_arg (arg2, POINTER_TYPE))
7752 return NULL_TREE;
7753
7754 type = TREE_TYPE (arg0);
7755
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7758 return res;
7759
7760 /* Canonicalize sincos to cexpi. */
7761 if (!targetm.libc_has_function (function_c99_math_complex))
7762 return NULL_TREE;
7763 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7764 if (!fn)
7765 return NULL_TREE;
7766
7767 call = build_call_expr_loc (loc, fn, 1, arg0);
7768 call = builtin_save_expr (call);
7769
7770 return build2 (COMPOUND_EXPR, void_type_node,
7771 build2 (MODIFY_EXPR, void_type_node,
7772 build_fold_indirect_ref_loc (loc, arg1),
7773 build1 (IMAGPART_EXPR, type, call)),
7774 build2 (MODIFY_EXPR, void_type_node,
7775 build_fold_indirect_ref_loc (loc, arg2),
7776 build1 (REALPART_EXPR, type, call)));
7777 }
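
/* Illustrative sketch, not part of the original source: when the C
   library provides the C99 complex functions, the canonicalization
   above rewrites

     sincos (x, &s, &c);

   as the equivalent of

     _Complex double tmp = cexpi (x);   i.e. cos (x) + I * sin (x)
     s = __imag__ tmp;
     c = __real__ tmp;

   so a single call produces both results.  */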
7778
7779 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7780 NULL_TREE if no simplification can be made. */
7781
7782 static tree
7783 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7784 {
7785 tree rtype;
7786 tree realp, imagp, ifn;
7787 tree res;
7788
7789 if (!validate_arg (arg0, COMPLEX_TYPE)
7790 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7791 return NULL_TREE;
7792
7793 /* Calculate the result when the argument is a constant. */
7794 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7795 return res;
7796
7797 rtype = TREE_TYPE (TREE_TYPE (arg0));
7798
7799   /* If we can figure out the real part of arg0 and it is constant zero,
7800      fold to cexpi.  */
7801 if (!targetm.libc_has_function (function_c99_math_complex))
7802 return NULL_TREE;
7803 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7804 if (!ifn)
7805 return NULL_TREE;
7806
7807 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7808 && real_zerop (realp))
7809 {
7810 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7811 return build_call_expr_loc (loc, ifn, 1, narg);
7812 }
7813
7814   /* If we can easily decompose the real and imaginary parts, split cexp
7815      into exp (r) * cexpi (i).  */
7816 if (flag_unsafe_math_optimizations
7817 && realp)
7818 {
7819 tree rfn, rcall, icall;
7820
7821 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7822 if (!rfn)
7823 return NULL_TREE;
7824
7825 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7826 if (!imagp)
7827 return NULL_TREE;
7828
7829 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7830 icall = builtin_save_expr (icall);
7831 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7832 rcall = builtin_save_expr (rcall);
7833 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7834 fold_build2_loc (loc, MULT_EXPR, rtype,
7835 rcall,
7836 fold_build1_loc (loc, REALPART_EXPR,
7837 rtype, icall)),
7838 fold_build2_loc (loc, MULT_EXPR, rtype,
7839 rcall,
7840 fold_build1_loc (loc, IMAGPART_EXPR,
7841 rtype, icall)));
7842 }
7843
7844 return NULL_TREE;
7845 }
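
/* Illustrative sketch, not part of the original source: for
   z = r + i*I the unsafe split above computes

     cexp (z)  ->  exp (r) * cexpi (i)

   and when the real part r is known to be zero the exp (r) factor is
   dropped and only cexpi (i) is emitted.  */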
7846
7847 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7848 Return NULL_TREE if no simplification can be made. */
7849
7850 static tree
7851 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7852 {
7853 if (!validate_arg (arg, REAL_TYPE))
7854 return NULL_TREE;
7855
7856 /* Optimize trunc of constant value. */
7857 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7858 {
7859 REAL_VALUE_TYPE r, x;
7860 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7861
7862 x = TREE_REAL_CST (arg);
7863 real_trunc (&r, TYPE_MODE (type), &x);
7864 return build_real (type, r);
7865 }
7866
7867 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7868 }
7869
7870 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7871 Return NULL_TREE if no simplification can be made. */
7872
7873 static tree
7874 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7875 {
7876 if (!validate_arg (arg, REAL_TYPE))
7877 return NULL_TREE;
7878
7879 /* Optimize floor of constant value. */
7880 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7881 {
7882 REAL_VALUE_TYPE x;
7883
7884 x = TREE_REAL_CST (arg);
7885 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7886 {
7887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7888 REAL_VALUE_TYPE r;
7889
7890 real_floor (&r, TYPE_MODE (type), &x);
7891 return build_real (type, r);
7892 }
7893 }
7894
7895 /* Fold floor (x) where x is nonnegative to trunc (x). */
7896 if (tree_expr_nonnegative_p (arg))
7897 {
7898 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7899 if (truncfn)
7900 return build_call_expr_loc (loc, truncfn, 1, arg);
7901 }
7902
7903 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7904 }
7905
7906 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7907 Return NULL_TREE if no simplification can be made. */
7908
7909 static tree
7910 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7911 {
7912 if (!validate_arg (arg, REAL_TYPE))
7913 return NULL_TREE;
7914
7915 /* Optimize ceil of constant value. */
7916 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7917 {
7918 REAL_VALUE_TYPE x;
7919
7920 x = TREE_REAL_CST (arg);
7921 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7922 {
7923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7924 REAL_VALUE_TYPE r;
7925
7926 real_ceil (&r, TYPE_MODE (type), &x);
7927 return build_real (type, r);
7928 }
7929 }
7930
7931 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7932 }
7933
7934 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7935 Return NULL_TREE if no simplification can be made. */
7936
7937 static tree
7938 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7939 {
7940 if (!validate_arg (arg, REAL_TYPE))
7941 return NULL_TREE;
7942
7943 /* Optimize round of constant value. */
7944 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7945 {
7946 REAL_VALUE_TYPE x;
7947
7948 x = TREE_REAL_CST (arg);
7949 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7950 {
7951 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7952 REAL_VALUE_TYPE r;
7953
7954 real_round (&r, TYPE_MODE (type), &x);
7955 return build_real (type, r);
7956 }
7957 }
7958
7959 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7960 }
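
/* Illustrative sketch, not part of the original source: the constant
   folding in the functions above evaluates, for example,

     trunc (2.7)   ->  2.0
     floor (-1.5)  ->  -2.0
     ceil (1.2)    ->  2.0
     round (2.5)   ->  3.0   (halfway cases away from zero)  */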
7961
7962 /* Fold function call to builtin lround, lroundf or lroundl (or the
7963 corresponding long long versions) and other rounding functions. ARG
7964 is the argument to the call. Return NULL_TREE if no simplification
7965 can be made. */
7966
7967 static tree
7968 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7969 {
7970 if (!validate_arg (arg, REAL_TYPE))
7971 return NULL_TREE;
7972
7973 /* Optimize lround of constant value. */
7974 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7975 {
7976 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7977
7978 if (real_isfinite (&x))
7979 {
7980 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7981 tree ftype = TREE_TYPE (arg);
7982 REAL_VALUE_TYPE r;
7983 bool fail = false;
7984
7985 switch (DECL_FUNCTION_CODE (fndecl))
7986 {
7987 CASE_FLT_FN (BUILT_IN_IFLOOR):
7988 CASE_FLT_FN (BUILT_IN_LFLOOR):
7989 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7990 real_floor (&r, TYPE_MODE (ftype), &x);
7991 break;
7992
7993 CASE_FLT_FN (BUILT_IN_ICEIL):
7994 CASE_FLT_FN (BUILT_IN_LCEIL):
7995 CASE_FLT_FN (BUILT_IN_LLCEIL):
7996 real_ceil (&r, TYPE_MODE (ftype), &x);
7997 break;
7998
7999 CASE_FLT_FN (BUILT_IN_IROUND):
8000 CASE_FLT_FN (BUILT_IN_LROUND):
8001 CASE_FLT_FN (BUILT_IN_LLROUND):
8002 real_round (&r, TYPE_MODE (ftype), &x);
8003 break;
8004
8005 default:
8006 gcc_unreachable ();
8007 }
8008
8009 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8010 if (!fail)
8011 return wide_int_to_tree (itype, val);
8012 }
8013 }
8014
8015 switch (DECL_FUNCTION_CODE (fndecl))
8016 {
8017 CASE_FLT_FN (BUILT_IN_LFLOOR):
8018 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8019 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8020 if (tree_expr_nonnegative_p (arg))
8021 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8022 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8023 break;
8024 default:;
8025 }
8026
8027 return fold_fixed_mathfn (loc, fndecl, arg);
8028 }
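
/* Illustrative sketch, not part of the original source: the constant
   folding above evaluates, for example,

     lround (2.5)     ->  3L     (rounds half away from zero)
     llround (-1.25)  ->  -1LL

   provided the result fits in the target integer type; otherwise the
   call is left alone.  */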
8029
8030 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8031    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8032 the argument to the call. Return NULL_TREE if no simplification can
8033 be made. */
8034
8035 static tree
8036 fold_builtin_bitop (tree fndecl, tree arg)
8037 {
8038 if (!validate_arg (arg, INTEGER_TYPE))
8039 return NULL_TREE;
8040
8041 /* Optimize for constant argument. */
8042 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8043 {
8044 tree type = TREE_TYPE (arg);
8045 int result;
8046
8047 switch (DECL_FUNCTION_CODE (fndecl))
8048 {
8049 CASE_INT_FN (BUILT_IN_FFS):
8050 result = wi::ffs (arg);
8051 break;
8052
8053 CASE_INT_FN (BUILT_IN_CLZ):
8054 if (wi::ne_p (arg, 0))
8055 result = wi::clz (arg);
8056 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8057 result = TYPE_PRECISION (type);
8058 break;
8059
8060 CASE_INT_FN (BUILT_IN_CTZ):
8061 if (wi::ne_p (arg, 0))
8062 result = wi::ctz (arg);
8063 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8064 result = TYPE_PRECISION (type);
8065 break;
8066
8067 CASE_INT_FN (BUILT_IN_CLRSB):
8068 result = wi::clrsb (arg);
8069 break;
8070
8071 CASE_INT_FN (BUILT_IN_POPCOUNT):
8072 result = wi::popcount (arg);
8073 break;
8074
8075 CASE_INT_FN (BUILT_IN_PARITY):
8076 result = wi::parity (arg);
8077 break;
8078
8079 default:
8080 gcc_unreachable ();
8081 }
8082
8083 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8084 }
8085
8086 return NULL_TREE;
8087 }
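
/* Illustrative sketch, not part of the original source: on a target
   with 32-bit int, the folding above gives

     __builtin_ffs (0x10)       ->  5
     __builtin_popcount (0xff)  ->  8
     __builtin_clz (1)          ->  31

   while __builtin_clz (0) folds to the target-defined value when
   CLZ_DEFINED_VALUE_AT_ZERO provides one, and to the type precision
   otherwise.  */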
8088
8089 /* Fold function call to builtin_bswap and the short, long and long long
8090 variants. Return NULL_TREE if no simplification can be made. */
8091 static tree
8092 fold_builtin_bswap (tree fndecl, tree arg)
8093 {
8094 if (! validate_arg (arg, INTEGER_TYPE))
8095 return NULL_TREE;
8096
8097 /* Optimize constant value. */
8098 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8099 {
8100 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8101
8102 switch (DECL_FUNCTION_CODE (fndecl))
8103 {
8104 case BUILT_IN_BSWAP16:
8105 case BUILT_IN_BSWAP32:
8106 case BUILT_IN_BSWAP64:
8107 {
8108 signop sgn = TYPE_SIGN (type);
8109 tree result =
8110 wide_int_to_tree (type,
8111 wide_int::from (arg, TYPE_PRECISION (type),
8112 sgn).bswap ());
8113 return result;
8114 }
8115 default:
8116 gcc_unreachable ();
8117 }
8118 }
8119
8120 return NULL_TREE;
8121 }
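
/* Illustrative sketch, not part of the original source: the folding
   above evaluates, for example,

     __builtin_bswap16 (0x1234)      ->  0x3412
     __builtin_bswap32 (0x12345678)  ->  0x78563412  */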
8122
8123 /* A subroutine of fold_builtin to fold the various logarithmic
8124    functions.  Return NULL_TREE if no simplification can be made.
8125 FUNC is the corresponding MPFR logarithm function. */
8126
8127 static tree
8128 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8129 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8130 {
8131 if (validate_arg (arg, REAL_TYPE))
8132 {
8133 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8134 tree res;
8135 const enum built_in_function fcode = builtin_mathfn_code (arg);
8136
8137 /* Calculate the result when the argument is a constant. */
8138 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8139 return res;
8140
8141 /* Special case, optimize logN(expN(x)) = x. */
8142 if (flag_unsafe_math_optimizations
8143 && ((func == mpfr_log
8144 && (fcode == BUILT_IN_EXP
8145 || fcode == BUILT_IN_EXPF
8146 || fcode == BUILT_IN_EXPL))
8147 || (func == mpfr_log2
8148 && (fcode == BUILT_IN_EXP2
8149 || fcode == BUILT_IN_EXP2F
8150 || fcode == BUILT_IN_EXP2L))
8151 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8152 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8153
8154 /* Optimize logN(func()) for various exponential functions. We
8155 want to determine the value "x" and the power "exponent" in
8156 order to transform logN(x**exponent) into exponent*logN(x). */
8157 if (flag_unsafe_math_optimizations)
8158 {
8159 tree exponent = 0, x = 0;
8160
8161 switch (fcode)
8162 {
8163 CASE_FLT_FN (BUILT_IN_EXP):
8164 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8165 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8166 dconst_e ()));
8167 exponent = CALL_EXPR_ARG (arg, 0);
8168 break;
8169 CASE_FLT_FN (BUILT_IN_EXP2):
8170 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8171 x = build_real (type, dconst2);
8172 exponent = CALL_EXPR_ARG (arg, 0);
8173 break;
8174 CASE_FLT_FN (BUILT_IN_EXP10):
8175 CASE_FLT_FN (BUILT_IN_POW10):
8176 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8177 {
8178 REAL_VALUE_TYPE dconst10;
8179 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8180 x = build_real (type, dconst10);
8181 }
8182 exponent = CALL_EXPR_ARG (arg, 0);
8183 break;
8184 CASE_FLT_FN (BUILT_IN_SQRT):
8185 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8186 x = CALL_EXPR_ARG (arg, 0);
8187 exponent = build_real (type, dconsthalf);
8188 break;
8189 CASE_FLT_FN (BUILT_IN_CBRT):
8190 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8191 x = CALL_EXPR_ARG (arg, 0);
8192 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8193 dconst_third ()));
8194 break;
8195 CASE_FLT_FN (BUILT_IN_POW):
8196 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8197 x = CALL_EXPR_ARG (arg, 0);
8198 exponent = CALL_EXPR_ARG (arg, 1);
8199 break;
8200 default:
8201 break;
8202 }
8203
8204 /* Now perform the optimization. */
8205 if (x && exponent)
8206 {
8207 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8208 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8209 }
8210 }
8211 }
8212
8213 return NULL_TREE;
8214 }
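
/* Illustrative sketch, not part of the original source: with
   -funsafe-math-optimizations the rewrites above give, for example,

     log (exp (x))      ->  x
     log (sqrt (x))     ->  0.5 * log (x)
     log2 (pow (x, y))  ->  y * log2 (x)  */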
8215
8216 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8217 NULL_TREE if no simplification can be made. */
8218
8219 static tree
8220 fold_builtin_hypot (location_t loc, tree fndecl,
8221 tree arg0, tree arg1, tree type)
8222 {
8223 tree res, narg0, narg1;
8224
8225 if (!validate_arg (arg0, REAL_TYPE)
8226 || !validate_arg (arg1, REAL_TYPE))
8227 return NULL_TREE;
8228
8229 /* Calculate the result when the argument is a constant. */
8230 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8231 return res;
8232
8233 /* If either argument to hypot has a negate or abs, strip that off.
8234 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8235 narg0 = fold_strip_sign_ops (arg0);
8236 narg1 = fold_strip_sign_ops (arg1);
8237 if (narg0 || narg1)
8238 {
8239 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8240 narg1 ? narg1 : arg1);
8241 }
8242
8243 /* If either argument is zero, hypot is fabs of the other. */
8244 if (real_zerop (arg0))
8245 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8246 else if (real_zerop (arg1))
8247 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8248
8249 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8250 if (flag_unsafe_math_optimizations
8251 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8252 {
8253 const REAL_VALUE_TYPE sqrt2_trunc
8254 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8255 return fold_build2_loc (loc, MULT_EXPR, type,
8256 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8257 build_real (type, sqrt2_trunc));
8258 }
8259
8260 return NULL_TREE;
8261 }
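
/* Illustrative sketch, not part of the original source: the rules
   above give, for example,

     hypot (-x, fabs (y))  ->  hypot (x, y)
     hypot (x, 0.0)        ->  fabs (x)
     hypot (x, x)          ->  fabs (x) * sqrt (2)   (unsafe math only)  */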
8262
8263
8264 /* Fold a builtin function call to pow, powf, or powl. Return
8265 NULL_TREE if no simplification can be made. */
8266 static tree
8267 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8268 {
8269 tree res;
8270
8271 if (!validate_arg (arg0, REAL_TYPE)
8272 || !validate_arg (arg1, REAL_TYPE))
8273 return NULL_TREE;
8274
8275 /* Calculate the result when the argument is a constant. */
8276 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8277 return res;
8278
8279 /* Optimize pow(1.0,y) = 1.0. */
8280 if (real_onep (arg0))
8281 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8282
8283 if (TREE_CODE (arg1) == REAL_CST
8284 && !TREE_OVERFLOW (arg1))
8285 {
8286 REAL_VALUE_TYPE cint;
8287 REAL_VALUE_TYPE c;
8288 HOST_WIDE_INT n;
8289
8290 c = TREE_REAL_CST (arg1);
8291
8292 /* Optimize pow(x,0.0) = 1.0. */
8293 if (REAL_VALUES_EQUAL (c, dconst0))
8294 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8295 arg0);
8296
8297 /* Optimize pow(x,1.0) = x. */
8298 if (REAL_VALUES_EQUAL (c, dconst1))
8299 return arg0;
8300
8301 /* Optimize pow(x,-1.0) = 1.0/x. */
8302 if (REAL_VALUES_EQUAL (c, dconstm1))
8303 return fold_build2_loc (loc, RDIV_EXPR, type,
8304 build_real (type, dconst1), arg0);
8305
8306 /* Optimize pow(x,0.5) = sqrt(x). */
8307 if (flag_unsafe_math_optimizations
8308 && REAL_VALUES_EQUAL (c, dconsthalf))
8309 {
8310 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8311
8312 if (sqrtfn != NULL_TREE)
8313 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8314 }
8315
8316 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8317 if (flag_unsafe_math_optimizations)
8318 {
8319 const REAL_VALUE_TYPE dconstroot
8320 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8321
8322 if (REAL_VALUES_EQUAL (c, dconstroot))
8323 {
8324 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8325 if (cbrtfn != NULL_TREE)
8326 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8327 }
8328 }
8329
8330 /* Check for an integer exponent. */
8331 n = real_to_integer (&c);
8332 real_from_integer (&cint, VOIDmode, n, SIGNED);
8333 if (real_identical (&c, &cint))
8334 {
8335 /* Attempt to evaluate pow at compile-time, unless this should
8336 raise an exception. */
8337 if (TREE_CODE (arg0) == REAL_CST
8338 && !TREE_OVERFLOW (arg0)
8339 && (n > 0
8340 || (!flag_trapping_math && !flag_errno_math)
8341 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8342 {
8343 REAL_VALUE_TYPE x;
8344 bool inexact;
8345
8346 x = TREE_REAL_CST (arg0);
8347 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8348 if (flag_unsafe_math_optimizations || !inexact)
8349 return build_real (type, x);
8350 }
8351
8352 /* Strip sign ops from even integer powers. */
8353 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8354 {
8355 tree narg0 = fold_strip_sign_ops (arg0);
8356 if (narg0)
8357 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8358 }
8359 }
8360 }
8361
8362 if (flag_unsafe_math_optimizations)
8363 {
8364 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8365
8366 /* Optimize pow(expN(x),y) = expN(x*y). */
8367 if (BUILTIN_EXPONENT_P (fcode))
8368 {
8369 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8370 tree arg = CALL_EXPR_ARG (arg0, 0);
8371 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8372 return build_call_expr_loc (loc, expfn, 1, arg);
8373 }
8374
8375 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8376 if (BUILTIN_SQRT_P (fcode))
8377 {
8378 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8379 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8380 build_real (type, dconsthalf));
8381 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8382 }
8383
8384 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8385 if (BUILTIN_CBRT_P (fcode))
8386 {
8387 tree arg = CALL_EXPR_ARG (arg0, 0);
8388 if (tree_expr_nonnegative_p (arg))
8389 {
8390 const REAL_VALUE_TYPE dconstroot
8391 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8392 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8393 build_real (type, dconstroot));
8394 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8395 }
8396 }
8397
8398 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8399 if (fcode == BUILT_IN_POW
8400 || fcode == BUILT_IN_POWF
8401 || fcode == BUILT_IN_POWL)
8402 {
8403 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8404 if (tree_expr_nonnegative_p (arg00))
8405 {
8406 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8407 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8408 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8409 }
8410 }
8411 }
8412
8413 return NULL_TREE;
8414 }
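
/* Illustrative sketch, not part of the original source: among the
   rewrites above,

     pow (1.0, y)       ->  1.0
     pow (x, -1.0)      ->  1.0 / x
     pow (x, 0.5)       ->  sqrt (x)           (unsafe math only)
     pow (sqrt (x), y)  ->  pow (x, y * 0.5)   (unsafe math only)  */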
8415
8416 /* Fold a builtin function call to powi, powif, or powil with arguments
8417    ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
8418 static tree
8419 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8420 tree arg0, tree arg1, tree type)
8421 {
8422 if (!validate_arg (arg0, REAL_TYPE)
8423 || !validate_arg (arg1, INTEGER_TYPE))
8424 return NULL_TREE;
8425
8426   /* Optimize powi(1.0,y) = 1.0.  */
8427 if (real_onep (arg0))
8428 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8429
8430 if (tree_fits_shwi_p (arg1))
8431 {
8432 HOST_WIDE_INT c = tree_to_shwi (arg1);
8433
8434 /* Evaluate powi at compile-time. */
8435 if (TREE_CODE (arg0) == REAL_CST
8436 && !TREE_OVERFLOW (arg0))
8437 {
8438 REAL_VALUE_TYPE x;
8439 x = TREE_REAL_CST (arg0);
8440 real_powi (&x, TYPE_MODE (type), &x, c);
8441 return build_real (type, x);
8442 }
8443
8444       /* Optimize powi(x,0) = 1.0.  */
8445 if (c == 0)
8446 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8447 arg0);
8448
8449       /* Optimize powi(x,1) = x.  */
8450 if (c == 1)
8451 return arg0;
8452
8453       /* Optimize powi(x,-1) = 1.0/x.  */
8454 if (c == -1)
8455 return fold_build2_loc (loc, RDIV_EXPR, type,
8456 build_real (type, dconst1), arg0);
8457 }
8458
8459 return NULL_TREE;
8460 }
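
/* Illustrative sketch, not part of the original source: for the powi
   builtin the folding above gives

     __builtin_powi (x, 0)   ->  1.0
     __builtin_powi (x, 1)   ->  x
     __builtin_powi (x, -1)  ->  1.0 / x

   and a call with both arguments constant is evaluated outright via
   real_powi.  */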
8461
8462 /* A subroutine of fold_builtin to fold the various exponent
8463 functions. Return NULL_TREE if no simplification can be made.
8464 FUNC is the corresponding MPFR exponent function. */
8465
8466 static tree
8467 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8468 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8469 {
8470 if (validate_arg (arg, REAL_TYPE))
8471 {
8472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8473 tree res;
8474
8475 /* Calculate the result when the argument is a constant. */
8476 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8477 return res;
8478
8479 /* Optimize expN(logN(x)) = x. */
8480 if (flag_unsafe_math_optimizations)
8481 {
8482 const enum built_in_function fcode = builtin_mathfn_code (arg);
8483
8484 if ((func == mpfr_exp
8485 && (fcode == BUILT_IN_LOG
8486 || fcode == BUILT_IN_LOGF
8487 || fcode == BUILT_IN_LOGL))
8488 || (func == mpfr_exp2
8489 && (fcode == BUILT_IN_LOG2
8490 || fcode == BUILT_IN_LOG2F
8491 || fcode == BUILT_IN_LOG2L))
8492 || (func == mpfr_exp10
8493 && (fcode == BUILT_IN_LOG10
8494 || fcode == BUILT_IN_LOG10F
8495 || fcode == BUILT_IN_LOG10L)))
8496 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8497 }
8498 }
8499
8500 return NULL_TREE;
8501 }
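
/* Illustrative sketch, not part of the original source: the unsafe
   rewrite above gives, for example,

     exp (log (x))    ->  x
     exp2 (log2 (x))  ->  x  */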
8502
8503 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8504 Return NULL_TREE if no simplification can be made. */
8505
8506 static tree
8507 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8508 {
8509 tree fn, len, lenp1, call, type;
8510
8511 if (!validate_arg (dest, POINTER_TYPE)
8512 || !validate_arg (src, POINTER_TYPE))
8513 return NULL_TREE;
8514
8515 len = c_strlen (src, 1);
8516 if (!len
8517 || TREE_CODE (len) != INTEGER_CST)
8518 return NULL_TREE;
8519
8520 if (optimize_function_for_size_p (cfun)
8521 /* If length is zero it's small enough. */
8522 && !integer_zerop (len))
8523 return NULL_TREE;
8524
8525 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8526 if (!fn)
8527 return NULL_TREE;
8528
8529 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8530 fold_convert_loc (loc, size_type_node, len),
8531 build_int_cst (size_type_node, 1));
8532 /* We use dest twice in building our expression. Save it from
8533 multiple expansions. */
8534 dest = builtin_save_expr (dest);
8535 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8536
8537 type = TREE_TYPE (TREE_TYPE (fndecl));
8538 dest = fold_build_pointer_plus_loc (loc, dest, len);
8539 dest = fold_convert_loc (loc, type, dest);
8540 dest = omit_one_operand_loc (loc, type, dest, call);
8541 return dest;
8542 }
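
/* Illustrative sketch, not part of the original source: when the
   source length is a known constant, the transformation above turns

     char *e = stpcpy (d, "abc");

   into the equivalent of

     memcpy (d, "abc", 4);
     char *e = d + 3;

   replacing the stpcpy call with a fixed-size memcpy.  */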
8543
8544 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8545 arguments to the call, and TYPE is its return type.
8546 Return NULL_TREE if no simplification can be made. */
8547
8548 static tree
8549 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8550 {
8551 if (!validate_arg (arg1, POINTER_TYPE)
8552 || !validate_arg (arg2, INTEGER_TYPE)
8553 || !validate_arg (len, INTEGER_TYPE))
8554 return NULL_TREE;
8555 else
8556 {
8557 const char *p1;
8558
8559 if (TREE_CODE (arg2) != INTEGER_CST
8560 || !tree_fits_uhwi_p (len))
8561 return NULL_TREE;
8562
8563 p1 = c_getstr (arg1);
8564 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8565 {
8566 char c;
8567 const char *r;
8568 tree tem;
8569
8570 if (target_char_cast (arg2, &c))
8571 return NULL_TREE;
8572
8573 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8574
8575 if (r == NULL)
8576 return build_int_cst (TREE_TYPE (arg1), 0);
8577
8578 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8579 return fold_convert_loc (loc, type, tem);
8580 }
8581 return NULL_TREE;
8582 }
8583 }
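
/* Illustrative sketch, not part of the original source: with a
   constant string and length, the folding above evaluates

     memchr ("hello", 'l', 6)  ->  the string's address plus 2
     memchr ("hello", 'z', 6)  ->  a null pointer

   at compile time.  */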
8584
8585 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8586 Return NULL_TREE if no simplification can be made. */
8587
8588 static tree
8589 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8590 {
8591 const char *p1, *p2;
8592
8593 if (!validate_arg (arg1, POINTER_TYPE)
8594 || !validate_arg (arg2, POINTER_TYPE)
8595 || !validate_arg (len, INTEGER_TYPE))
8596 return NULL_TREE;
8597
8598 /* If the LEN parameter is zero, return zero. */
8599 if (integer_zerop (len))
8600 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8601 arg1, arg2);
8602
8603 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8604 if (operand_equal_p (arg1, arg2, 0))
8605 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8606
8607 p1 = c_getstr (arg1);
8608 p2 = c_getstr (arg2);
8609
8610 /* If all arguments are constant, and the value of len is not greater
8611 than the lengths of arg1 and arg2, evaluate at compile-time. */
8612 if (tree_fits_uhwi_p (len) && p1 && p2
8613 && compare_tree_int (len, strlen (p1) + 1) <= 0
8614 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8615 {
8616 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8617
8618 if (r > 0)
8619 return integer_one_node;
8620 else if (r < 0)
8621 return integer_minus_one_node;
8622 else
8623 return integer_zero_node;
8624 }
8625
8626   /* If the len parameter is one, return an expression corresponding to
8627      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8628 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8629 {
8630 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8631 tree cst_uchar_ptr_node
8632 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8633
8634 tree ind1
8635 = fold_convert_loc (loc, integer_type_node,
8636 build1 (INDIRECT_REF, cst_uchar_node,
8637 fold_convert_loc (loc,
8638 cst_uchar_ptr_node,
8639 arg1)));
8640 tree ind2
8641 = fold_convert_loc (loc, integer_type_node,
8642 build1 (INDIRECT_REF, cst_uchar_node,
8643 fold_convert_loc (loc,
8644 cst_uchar_ptr_node,
8645 arg2)));
8646 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8647 }
8648
8649 return NULL_TREE;
8650 }
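
/* Illustrative sketch, not part of the original source: the len == 1
   case above reduces

     memcmp (p, q, 1)

   to the expression

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   avoiding a library call for a single-byte comparison.  */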
8651
8652 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8653 Return NULL_TREE if no simplification can be made. */
8654
8655 static tree
8656 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8657 {
8658 const char *p1, *p2;
8659
8660 if (!validate_arg (arg1, POINTER_TYPE)
8661 || !validate_arg (arg2, POINTER_TYPE))
8662 return NULL_TREE;
8663
8664 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8665 if (operand_equal_p (arg1, arg2, 0))
8666 return integer_zero_node;
8667
8668 p1 = c_getstr (arg1);
8669 p2 = c_getstr (arg2);
8670
8671 if (p1 && p2)
8672 {
8673 const int i = strcmp (p1, p2);
8674 if (i < 0)
8675 return integer_minus_one_node;
8676 else if (i > 0)
8677 return integer_one_node;
8678 else
8679 return integer_zero_node;
8680 }
8681
8682 /* If the second arg is "", return *(const unsigned char*)arg1. */
8683 if (p2 && *p2 == '\0')
8684 {
8685 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8686 tree cst_uchar_ptr_node
8687 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8688
8689 return fold_convert_loc (loc, integer_type_node,
8690 build1 (INDIRECT_REF, cst_uchar_node,
8691 fold_convert_loc (loc,
8692 cst_uchar_ptr_node,
8693 arg1)));
8694 }
8695
8696 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8697 if (p1 && *p1 == '\0')
8698 {
8699 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8700 tree cst_uchar_ptr_node
8701 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8702
8703 tree temp
8704 = fold_convert_loc (loc, integer_type_node,
8705 build1 (INDIRECT_REF, cst_uchar_node,
8706 fold_convert_loc (loc,
8707 cst_uchar_ptr_node,
8708 arg2)));
8709 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8710 }
8711
8712 return NULL_TREE;
8713 }
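
/* Illustrative examples of the strcmp folding above:

     strcmp (x, x)      ->  0
     strcmp ("a", "b")  ->  -1
     strcmp (x, "")     ->  *(const unsigned char *) x
     strcmp ("", x)     ->  -*(const unsigned char *) x  */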
8714
8715 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8716 Return NULL_TREE if no simplification can be made. */
8717
8718 static tree
8719 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8720 {
8721 const char *p1, *p2;
8722
8723 if (!validate_arg (arg1, POINTER_TYPE)
8724 || !validate_arg (arg2, POINTER_TYPE)
8725 || !validate_arg (len, INTEGER_TYPE))
8726 return NULL_TREE;
8727
8728 /* If the LEN parameter is zero, return zero. */
8729 if (integer_zerop (len))
8730 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8731 arg1, arg2);
8732
8733 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8734 if (operand_equal_p (arg1, arg2, 0))
8735 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8736
8737 p1 = c_getstr (arg1);
8738 p2 = c_getstr (arg2);
8739
8740 if (tree_fits_uhwi_p (len) && p1 && p2)
8741 {
8742 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8743 if (i > 0)
8744 return integer_one_node;
8745 else if (i < 0)
8746 return integer_minus_one_node;
8747 else
8748 return integer_zero_node;
8749 }
8750
8751 /* If the second arg is "", and the length is greater than zero,
8752 return *(const unsigned char*)arg1. */
8753 if (p2 && *p2 == '\0'
8754 && TREE_CODE (len) == INTEGER_CST
8755 && tree_int_cst_sgn (len) == 1)
8756 {
8757 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8758 tree cst_uchar_ptr_node
8759 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8760
8761 return fold_convert_loc (loc, integer_type_node,
8762 build1 (INDIRECT_REF, cst_uchar_node,
8763 fold_convert_loc (loc,
8764 cst_uchar_ptr_node,
8765 arg1)));
8766 }
8767
8768 /* If the first arg is "", and the length is greater than zero,
8769 return -*(const unsigned char*)arg2. */
8770 if (p1 && *p1 == '\0'
8771 && TREE_CODE (len) == INTEGER_CST
8772 && tree_int_cst_sgn (len) == 1)
8773 {
8774 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8775 tree cst_uchar_ptr_node
8776 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8777
8778 tree temp = fold_convert_loc (loc, integer_type_node,
8779 build1 (INDIRECT_REF, cst_uchar_node,
8780 fold_convert_loc (loc,
8781 cst_uchar_ptr_node,
8782 arg2)));
8783 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8784 }
8785
8786 /* If the LEN parameter is one, return an expression corresponding to
8787 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8788 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8789 {
8790 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8791 tree cst_uchar_ptr_node
8792 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8793
8794 tree ind1 = fold_convert_loc (loc, integer_type_node,
8795 build1 (INDIRECT_REF, cst_uchar_node,
8796 fold_convert_loc (loc,
8797 cst_uchar_ptr_node,
8798 arg1)));
8799 tree ind2 = fold_convert_loc (loc, integer_type_node,
8800 build1 (INDIRECT_REF, cst_uchar_node,
8801 fold_convert_loc (loc,
8802 cst_uchar_ptr_node,
8803 arg2)));
8804 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8805 }
8806
8807 return NULL_TREE;
8808 }
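
/* For illustration, the strncmp folding above gives e.g.:

     strncmp (x, y, 0)        ->  0   (X and Y still evaluated for
                                       side effects)
     strncmp ("ab", "ac", 1)  ->  0
     strncmp (x, "", n)       ->  *(const unsigned char *) x
                                  for constant n > 0
     strncmp (p, q, 1)        ->  *(const unsigned char *) p
                                  - *(const unsigned char *) q  */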
8809
8810 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8811 ARG. Return NULL_TREE if no simplification can be made. */
8812
8813 static tree
8814 fold_builtin_signbit (location_t loc, tree arg, tree type)
8815 {
8816 if (!validate_arg (arg, REAL_TYPE))
8817 return NULL_TREE;
8818
8819 /* If ARG is a compile-time constant, determine the result. */
8820 if (TREE_CODE (arg) == REAL_CST
8821 && !TREE_OVERFLOW (arg))
8822 {
8823 REAL_VALUE_TYPE c;
8824
8825 c = TREE_REAL_CST (arg);
8826 return (REAL_VALUE_NEGATIVE (c)
8827 ? build_one_cst (type)
8828 : build_zero_cst (type));
8829 }
8830
8831 /* If ARG is non-negative, the result is always zero. */
8832 if (tree_expr_nonnegative_p (arg))
8833 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8834
8835 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8836 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8837 return fold_convert (type,
8838 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8839 build_real (TREE_TYPE (arg), dconst0)));
8840
8841 return NULL_TREE;
8842 }
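
/* Illustrative examples of the signbit folding above:

     signbit (-0.5)      ->  1
     signbit (1.0)       ->  0
     signbit (fabs (x))  ->  0   (argument known non-negative)

   and, for formats without signed zeros, signbit (x) -> x < 0.0.  */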
8843
8844 /* Fold function call to builtin copysign, copysignf or copysignl with
8845 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8846 be made. */
8847
8848 static tree
8849 fold_builtin_copysign (location_t loc, tree fndecl,
8850 tree arg1, tree arg2, tree type)
8851 {
8852 tree tem;
8853
8854 if (!validate_arg (arg1, REAL_TYPE)
8855 || !validate_arg (arg2, REAL_TYPE))
8856 return NULL_TREE;
8857
8858 /* copysign(X,X) is X. */
8859 if (operand_equal_p (arg1, arg2, 0))
8860 return fold_convert_loc (loc, type, arg1);
8861
8862 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8863 if (TREE_CODE (arg1) == REAL_CST
8864 && TREE_CODE (arg2) == REAL_CST
8865 && !TREE_OVERFLOW (arg1)
8866 && !TREE_OVERFLOW (arg2))
8867 {
8868 REAL_VALUE_TYPE c1, c2;
8869
8870 c1 = TREE_REAL_CST (arg1);
8871 c2 = TREE_REAL_CST (arg2);
8872 /* c1.sign := c2.sign. */
8873 real_copysign (&c1, &c2);
8874 return build_real (type, c1);
8875 }
8876
8877 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8878 Remember to evaluate Y for side-effects. */
8879 if (tree_expr_nonnegative_p (arg2))
8880 return omit_one_operand_loc (loc, type,
8881 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8882 arg2);
8883
8884 /* Strip sign changing operations for the first argument. */
8885 tem = fold_strip_sign_ops (arg1);
8886 if (tem)
8887 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8888
8889 return NULL_TREE;
8890 }
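
/* For illustration, the copysign folding above yields e.g.:

     copysign (x, x)       ->  x
     copysign (3.0, -0.0)  ->  -3.0     (via real_copysign)
     copysign (x, 2.0)     ->  fabs (x)
     copysign (-x, y)      ->  copysign (x, y)
                               (ARG1's own sign is irrelevant)  */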
8891
8892 /* Fold a call to builtin isascii with argument ARG. */
8893
8894 static tree
8895 fold_builtin_isascii (location_t loc, tree arg)
8896 {
8897 if (!validate_arg (arg, INTEGER_TYPE))
8898 return NULL_TREE;
8899 else
8900 {
8901 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8902 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8903 build_int_cst (integer_type_node,
8904 ~ (unsigned HOST_WIDE_INT) 0x7f));
8905 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8906 arg, integer_zero_node);
8907 }
8908 }
8909
8910 /* Fold a call to builtin toascii with argument ARG. */
8911
8912 static tree
8913 fold_builtin_toascii (location_t loc, tree arg)
8914 {
8915 if (!validate_arg (arg, INTEGER_TYPE))
8916 return NULL_TREE;
8917
8918 /* Transform toascii(c) -> (c & 0x7f). */
8919 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8920 build_int_cst (integer_type_node, 0x7f));
8921 }
8922
8923 /* Fold a call to builtin isdigit with argument ARG. */
8924
8925 static tree
8926 fold_builtin_isdigit (location_t loc, tree arg)
8927 {
8928 if (!validate_arg (arg, INTEGER_TYPE))
8929 return NULL_TREE;
8930 else
8931 {
8932 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8933 /* According to the C standard, isdigit is unaffected by locale.
8934 However, it definitely is affected by the target character set. */
8935 unsigned HOST_WIDE_INT target_digit0
8936 = lang_hooks.to_target_charset ('0');
8937
8938 if (target_digit0 == 0)
8939 return NULL_TREE;
8940
8941 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8942 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8943 build_int_cst (unsigned_type_node, target_digit0));
8944 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8945 build_int_cst (unsigned_type_node, 9));
8946 }
8947 }
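
/* As an illustration, on an ASCII target the transformation above
   turns isdigit (c) into

     (unsigned) c - 48 <= 9

   where 48 is the target-charset code of '0'.  If the target hook
   cannot map '0' (it returns 0), the call is left alone.  */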
8948
8949 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8950
8951 static tree
8952 fold_builtin_fabs (location_t loc, tree arg, tree type)
8953 {
8954 if (!validate_arg (arg, REAL_TYPE))
8955 return NULL_TREE;
8956
8957 arg = fold_convert_loc (loc, type, arg);
8958 if (TREE_CODE (arg) == REAL_CST)
8959 return fold_abs_const (arg, type);
8960 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8961 }
8962
8963 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8964
8965 static tree
8966 fold_builtin_abs (location_t loc, tree arg, tree type)
8967 {
8968 if (!validate_arg (arg, INTEGER_TYPE))
8969 return NULL_TREE;
8970
8971 arg = fold_convert_loc (loc, type, arg);
8972 if (TREE_CODE (arg) == INTEGER_CST)
8973 return fold_abs_const (arg, type);
8974 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8975 }
8976
8977 /* Fold a fma operation with arguments ARG[012]. */
8978
8979 tree
8980 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8981 tree type, tree arg0, tree arg1, tree arg2)
8982 {
8983 if (TREE_CODE (arg0) == REAL_CST
8984 && TREE_CODE (arg1) == REAL_CST
8985 && TREE_CODE (arg2) == REAL_CST)
8986 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8987
8988 return NULL_TREE;
8989 }
8990
8991 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8992
8993 static tree
8994 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8995 {
8996 if (validate_arg (arg0, REAL_TYPE)
8997 && validate_arg (arg1, REAL_TYPE)
8998 && validate_arg (arg2, REAL_TYPE))
8999 {
9000 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9001 if (tem)
9002 return tem;
9003
9004 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9005 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9006 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9007 }
9008 return NULL_TREE;
9009 }
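
/* Illustrative examples of the fma folding above:

     fma (2.0, 3.0, 4.0)  ->  10.0   (evaluated exactly via MPFR)

   For non-constant operands, fma (x, y, z) becomes FMA_EXPR only when
   the target has a fused multiply-add pattern for the type's mode.  */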
9010
9011 /* Fold a call to builtin fmin or fmax. */
9012
9013 static tree
9014 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9015 tree type, bool max)
9016 {
9017 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9018 {
9019 /* Calculate the result when the argument is a constant. */
9020 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9021
9022 if (res)
9023 return res;
9024
9025 /* If either argument is NaN, return the other one. Avoid the
9026 transformation if we get (and honor) a signalling NaN. Using
9027 omit_one_operand() ensures we create a non-lvalue. */
9028 if (TREE_CODE (arg0) == REAL_CST
9029 && real_isnan (&TREE_REAL_CST (arg0))
9030 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9031 || ! TREE_REAL_CST (arg0).signalling))
9032 return omit_one_operand_loc (loc, type, arg1, arg0);
9033 if (TREE_CODE (arg1) == REAL_CST
9034 && real_isnan (&TREE_REAL_CST (arg1))
9035 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9036 || ! TREE_REAL_CST (arg1).signalling))
9037 return omit_one_operand_loc (loc, type, arg0, arg1);
9038
9039 /* Transform fmin/fmax(x,x) -> x. */
9040 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9041 return omit_one_operand_loc (loc, type, arg0, arg1);
9042
9043 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9044 functions to return the numeric arg if the other one is NaN.
9045 These tree codes don't honor that, so only transform if
9046 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9047 handled, so we don't have to worry about it either. */
9048 if (flag_finite_math_only)
9049 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9050 fold_convert_loc (loc, type, arg0),
9051 fold_convert_loc (loc, type, arg1));
9052 }
9053 return NULL_TREE;
9054 }
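
/* For illustration, the fmin/fmax folding above gives e.g.:

     fmax (x, __builtin_nan (""))  ->  x   (quiet NaN dropped)
     fmin (x, x)                   ->  x
     fmax (x, y)                   ->  MAX_EXPR <x, y>
                                       (only with -ffinite-math-only)  */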
9055
9056 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9057
9058 static tree
9059 fold_builtin_carg (location_t loc, tree arg, tree type)
9060 {
9061 if (validate_arg (arg, COMPLEX_TYPE)
9062 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9063 {
9064 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9065
9066 if (atan2_fn)
9067 {
9068 tree new_arg = builtin_save_expr (arg);
9069 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9070 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9071 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9072 }
9073 }
9074
9075 return NULL_TREE;
9076 }
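
/* E.g. carg (z), for z of type _Complex double, is rewritten above as
   atan2 (__imag__ z, __real__ z), with Z saved so that it is evaluated
   only once.  */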
9077
9078 /* Fold a call to builtin logb/ilogb. */
9079
9080 static tree
9081 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9082 {
9083 if (! validate_arg (arg, REAL_TYPE))
9084 return NULL_TREE;
9085
9086 STRIP_NOPS (arg);
9087
9088 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9089 {
9090 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9091
9092 switch (value->cl)
9093 {
9094 case rvc_nan:
9095 case rvc_inf:
9096 /* If arg is Inf or NaN and we're logb, return it. */
9097 if (TREE_CODE (rettype) == REAL_TYPE)
9098 {
9099 /* For logb(-Inf) we have to return +Inf. */
9100 if (real_isinf (value) && real_isneg (value))
9101 {
9102 REAL_VALUE_TYPE tem;
9103 real_inf (&tem);
9104 return build_real (rettype, tem);
9105 }
9106 return fold_convert_loc (loc, rettype, arg);
9107 }
9108 /* Fall through... */
9109 case rvc_zero:
9110 /* Zero may set errno and/or raise an exception for logb; also,
9111 for ilogb we don't know FP_ILOGB0. */
9112 return NULL_TREE;
9113 case rvc_normal:
9114 /* For normal numbers, proceed iff radix == 2. In GCC,
9115 normalized significands are in the range [0.5, 1.0). We
9116 want the exponent as if they were [1.0, 2.0) so get the
9117 exponent and subtract 1. */
9118 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9119 return fold_convert_loc (loc, rettype,
9120 build_int_cst (integer_type_node,
9121 REAL_EXP (value)-1));
9122 break;
9123 }
9124 }
9125
9126 return NULL_TREE;
9127 }
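
/* Illustrative examples of the logb/ilogb folding above, for a
   radix-2 format:

     logb (8.0)   ->  3.0    (8.0 is 0.5 * 2**4, so REAL_EXP - 1 == 3)
     ilogb (8.0)  ->  3
     logb (-Inf)  ->  +Inf
     logb (0.0)   ->  not folded (may set errno or raise an exception)  */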
9128
9129 /* Fold a call to builtin significand, if radix == 2. */
9130
9131 static tree
9132 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9133 {
9134 if (! validate_arg (arg, REAL_TYPE))
9135 return NULL_TREE;
9136
9137 STRIP_NOPS (arg);
9138
9139 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9140 {
9141 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9142
9143 switch (value->cl)
9144 {
9145 case rvc_zero:
9146 case rvc_nan:
9147 case rvc_inf:
9148 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9149 return fold_convert_loc (loc, rettype, arg);
9150 case rvc_normal:
9151 /* For normal numbers, proceed iff radix == 2. */
9152 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9153 {
9154 REAL_VALUE_TYPE result = *value;
9155 /* In GCC, normalized significands are in the range [0.5,
9156 1.0). We want them to be [1.0, 2.0) so set the
9157 exponent to 1. */
9158 SET_REAL_EXP (&result, 1);
9159 return build_real (rettype, result);
9160 }
9161 break;
9162 }
9163 }
9164
9165 return NULL_TREE;
9166 }
9167
9168 /* Fold a call to builtin frexp. We can assume the base is 2. */
9169
9170 static tree
9171 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9172 {
9173 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9174 return NULL_TREE;
9175
9176 STRIP_NOPS (arg0);
9177
9178 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9179 return NULL_TREE;
9180
9181 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9182
9183 /* Proceed if a valid pointer type was passed in. */
9184 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9185 {
9186 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9187 tree frac, exp;
9188
9189 switch (value->cl)
9190 {
9191 case rvc_zero:
9192 /* For +-0, return (*exp = 0, +-0). */
9193 exp = integer_zero_node;
9194 frac = arg0;
9195 break;
9196 case rvc_nan:
9197 case rvc_inf:
9198 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9199 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9200 case rvc_normal:
9201 {
9202 /* Since the frexp function always expects base 2, and in
9203 GCC normalized significands are already in the range
9204 [0.5, 1.0), we have exactly what frexp wants. */
9205 REAL_VALUE_TYPE frac_rvt = *value;
9206 SET_REAL_EXP (&frac_rvt, 0);
9207 frac = build_real (rettype, frac_rvt);
9208 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9209 }
9210 break;
9211 default:
9212 gcc_unreachable ();
9213 }
9214
9215 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9216 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9217 TREE_SIDE_EFFECTS (arg1) = 1;
9218 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9219 }
9220
9221 return NULL_TREE;
9222 }
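
/* For illustration, with a constant first argument the frexp folding
   above builds a COMPOUND_EXPR, e.g.:

     frexp (8.0, &e)  ->  (e = 4, 0.5)
     frexp (0.0, &e)  ->  (e = 0, 0.0)
     frexp (NaN, &e)  ->  NaN   (*e unspecified; &e still evaluated)  */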
9223
9224 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9225 then we can assume the base is two. If it's false, then we have to
9226 check the mode of the TYPE parameter in certain cases. */
9227
9228 static tree
9229 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9230 tree type, bool ldexp)
9231 {
9232 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9233 {
9234 STRIP_NOPS (arg0);
9235 STRIP_NOPS (arg1);
9236
9237 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9238 if (real_zerop (arg0) || integer_zerop (arg1)
9239 || (TREE_CODE (arg0) == REAL_CST
9240 && !real_isfinite (&TREE_REAL_CST (arg0))))
9241 return omit_one_operand_loc (loc, type, arg0, arg1);
9242
9243 /* If both arguments are constant, then try to evaluate it. */
9244 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9245 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9246 && tree_fits_shwi_p (arg1))
9247 {
9248 /* Bound the maximum adjustment to twice the range of the
9249 mode's valid exponents. Use abs to ensure the range is
9250 positive as a sanity check. */
9251 const long max_exp_adj = 2 *
9252 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9253 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9254
9255 /* Get the user-requested adjustment. */
9256 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9257
9258 /* The requested adjustment must be inside this range. This
9259 is a preliminary cap to avoid things like overflow; we
9260 may still fail to compute the result for other reasons. */
9261 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9262 {
9263 REAL_VALUE_TYPE initial_result;
9264
9265 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9266
9267 /* Ensure we didn't overflow. */
9268 if (! real_isinf (&initial_result))
9269 {
9270 const REAL_VALUE_TYPE trunc_result
9271 = real_value_truncate (TYPE_MODE (type), initial_result);
9272
9273 /* Only proceed if the target mode can hold the
9274 resulting value. */
9275 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9276 return build_real (type, trunc_result);
9277 }
9278 }
9279 }
9280 }
9281
9282 return NULL_TREE;
9283 }
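
/* Illustrative examples of the ldexp/scalbn folding above:

     ldexp (x, 0)    ->  x
     ldexp (0.0, n)  ->  0.0   (N still evaluated for side effects)
     ldexp (1.5, 2)  ->  6.0   (via real_ldexp)

   Constant folds whose result would not fit the target mode are
   declined.  */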
9284
9285 /* Fold a call to builtin modf. */
9286
9287 static tree
9288 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9289 {
9290 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9291 return NULL_TREE;
9292
9293 STRIP_NOPS (arg0);
9294
9295 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9296 return NULL_TREE;
9297
9298 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9299
9300 /* Proceed if a valid pointer type was passed in. */
9301 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9302 {
9303 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9304 REAL_VALUE_TYPE trunc, frac;
9305
9306 switch (value->cl)
9307 {
9308 case rvc_nan:
9309 case rvc_zero:
9310 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9311 trunc = frac = *value;
9312 break;
9313 case rvc_inf:
9314 /* For +-Inf, return (*arg1 = arg0, +-0). */
9315 frac = dconst0;
9316 frac.sign = value->sign;
9317 trunc = *value;
9318 break;
9319 case rvc_normal:
9320 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9321 real_trunc (&trunc, VOIDmode, value);
9322 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9323 /* If the original number was negative and already
9324 integral, then the fractional part is -0.0. */
9325 if (value->sign && frac.cl == rvc_zero)
9326 frac.sign = value->sign;
9327 break;
9328 }
9329
9330 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9331 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9332 build_real (rettype, trunc));
9333 TREE_SIDE_EFFECTS (arg1) = 1;
9334 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9335 build_real (rettype, frac));
9336 }
9337
9338 return NULL_TREE;
9339 }
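
/* For illustration, with a constant first argument the modf folding
   above produces e.g.:

     modf (3.25, &ip)  ->  (ip = 3.0, 0.25)
     modf (-2.0, &ip)  ->  (ip = -2.0, -0.0)
     modf (-Inf, &ip)  ->  (ip = -Inf, -0.0)  */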
9340
9341 /* Given a location LOC, an interclass builtin function decl FNDECL
9342 and its single argument ARG, return a folded expression computing
9343 the same, or NULL_TREE if we either couldn't or didn't want to fold
9344 (the latter happens if there's an RTL instruction available). */
9345
9346 static tree
9347 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9348 {
9349 enum machine_mode mode;
9350
9351 if (!validate_arg (arg, REAL_TYPE))
9352 return NULL_TREE;
9353
9354 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9355 return NULL_TREE;
9356
9357 mode = TYPE_MODE (TREE_TYPE (arg));
9358
9359 /* If there is no optab, try generic code. */
9360 switch (DECL_FUNCTION_CODE (fndecl))
9361 {
9362 tree result;
9363
9364 CASE_FLT_FN (BUILT_IN_ISINF):
9365 {
9366 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9367 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9368 tree const type = TREE_TYPE (arg);
9369 REAL_VALUE_TYPE r;
9370 char buf[128];
9371
9372 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9373 real_from_string (&r, buf);
9374 result = build_call_expr (isgr_fn, 2,
9375 fold_build1_loc (loc, ABS_EXPR, type, arg),
9376 build_real (type, r));
9377 return result;
9378 }
9379 CASE_FLT_FN (BUILT_IN_FINITE):
9380 case BUILT_IN_ISFINITE:
9381 {
9382 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9383 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9384 tree const type = TREE_TYPE (arg);
9385 REAL_VALUE_TYPE r;
9386 char buf[128];
9387
9388 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9389 real_from_string (&r, buf);
9390 result = build_call_expr (isle_fn, 2,
9391 fold_build1_loc (loc, ABS_EXPR, type, arg),
9392 build_real (type, r));
9393 /*result = fold_build2_loc (loc, UNGT_EXPR,
9394 TREE_TYPE (TREE_TYPE (fndecl)),
9395 fold_build1_loc (loc, ABS_EXPR, type, arg),
9396 build_real (type, r));
9397 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9398 TREE_TYPE (TREE_TYPE (fndecl)),
9399 result);*/
9400 return result;
9401 }
9402 case BUILT_IN_ISNORMAL:
9403 {
9404 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9405 islessequal(fabs(x),DBL_MAX). */
9406 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9407 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9408 tree const type = TREE_TYPE (arg);
9409 REAL_VALUE_TYPE rmax, rmin;
9410 char buf[128];
9411
9412 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9413 real_from_string (&rmax, buf);
9414 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9415 real_from_string (&rmin, buf);
9416 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9417 result = build_call_expr (isle_fn, 2, arg,
9418 build_real (type, rmax));
9419 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9420 build_call_expr (isge_fn, 2, arg,
9421 build_real (type, rmin)));
9422 return result;
9423 }
9424 default:
9425 break;
9426 }
9427
9428 return NULL_TREE;
9429 }
9430
9431 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9432 ARG is the argument for the call. */
9433
9434 static tree
9435 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9436 {
9437 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9438 REAL_VALUE_TYPE r;
9439
9440 if (!validate_arg (arg, REAL_TYPE))
9441 return NULL_TREE;
9442
9443 switch (builtin_index)
9444 {
9445 case BUILT_IN_ISINF:
9446 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9447 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9448
9449 if (TREE_CODE (arg) == REAL_CST)
9450 {
9451 r = TREE_REAL_CST (arg);
9452 if (real_isinf (&r))
9453 return real_compare (GT_EXPR, &r, &dconst0)
9454 ? integer_one_node : integer_minus_one_node;
9455 else
9456 return integer_zero_node;
9457 }
9458
9459 return NULL_TREE;
9460
9461 case BUILT_IN_ISINF_SIGN:
9462 {
9463 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9464 /* In a boolean context, GCC will fold the inner COND_EXPR to
9465 1. So e.g. "if (isinf_sign(x))" would be folded to just
9466 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9467 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9468 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9469 tree tmp = NULL_TREE;
9470
9471 arg = builtin_save_expr (arg);
9472
9473 if (signbit_fn && isinf_fn)
9474 {
9475 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9476 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9477
9478 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9479 signbit_call, integer_zero_node);
9480 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9481 isinf_call, integer_zero_node);
9482
9483 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9484 integer_minus_one_node, integer_one_node);
9485 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9486 isinf_call, tmp,
9487 integer_zero_node);
9488 }
9489
9490 return tmp;
9491 }
9492
9493 case BUILT_IN_ISFINITE:
9494 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9495 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9496 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9497
9498 if (TREE_CODE (arg) == REAL_CST)
9499 {
9500 r = TREE_REAL_CST (arg);
9501 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9502 }
9503
9504 return NULL_TREE;
9505
9506 case BUILT_IN_ISNAN:
9507 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9508 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9509
9510 if (TREE_CODE (arg) == REAL_CST)
9511 {
9512 r = TREE_REAL_CST (arg);
9513 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9514 }
9515
9516 arg = builtin_save_expr (arg);
9517 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9518
9519 default:
9520 gcc_unreachable ();
9521 }
9522 }
9523
9524 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9525 This builtin will generate code to return the appropriate floating
9526 point classification depending on the value of the floating point
9527 number passed in. The possible return values must be supplied as
9528 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9529 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9530 one floating point argument which is "type generic". */
9531
9532 static tree
9533 fold_builtin_fpclassify (location_t loc, tree exp)
9534 {
9535 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9536 arg, type, res, tmp;
9537 enum machine_mode mode;
9538 REAL_VALUE_TYPE r;
9539 char buf[128];
9540
9541 /* Verify the required arguments in the original call. */
9542 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9543 INTEGER_TYPE, INTEGER_TYPE,
9544 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9545 return NULL_TREE;
9546
9547 fp_nan = CALL_EXPR_ARG (exp, 0);
9548 fp_infinite = CALL_EXPR_ARG (exp, 1);
9549 fp_normal = CALL_EXPR_ARG (exp, 2);
9550 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9551 fp_zero = CALL_EXPR_ARG (exp, 4);
9552 arg = CALL_EXPR_ARG (exp, 5);
9553 type = TREE_TYPE (arg);
9554 mode = TYPE_MODE (type);
9555 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9556
9557 /* fpclassify(x) ->
9558 isnan(x) ? FP_NAN :
9559 (fabs(x) == Inf ? FP_INFINITE :
9560 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9561 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9562
9563 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9564 build_real (type, dconst0));
9565 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9566 tmp, fp_zero, fp_subnormal);
9567
9568 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9569 real_from_string (&r, buf);
9570 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9571 arg, build_real (type, r));
9572 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9573
9574 if (HONOR_INFINITIES (mode))
9575 {
9576 real_inf (&r);
9577 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9578 build_real (type, r));
9579 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9580 fp_infinite, res);
9581 }
9582
9583 if (HONOR_NANS (mode))
9584 {
9585 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9586 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9587 }
9588
9589 return res;
9590 }
9591
9592 /* Fold a call to an unordered comparison function such as
9593 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9594 being called and ARG0 and ARG1 are the arguments for the call.
9595 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9596 the opposite of the desired result. UNORDERED_CODE is used
9597 for modes that can hold NaNs and ORDERED_CODE is used for
9598 the rest. */
9599
9600 static tree
9601 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9602 enum tree_code unordered_code,
9603 enum tree_code ordered_code)
9604 {
9605 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9606 enum tree_code code;
9607 tree type0, type1;
9608 enum tree_code code0, code1;
9609 tree cmp_type = NULL_TREE;
9610
9611 type0 = TREE_TYPE (arg0);
9612 type1 = TREE_TYPE (arg1);
9613
9614 code0 = TREE_CODE (type0);
9615 code1 = TREE_CODE (type1);
9616
9617 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9618 /* Choose the wider of two real types. */
9619 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9620 ? type0 : type1;
9621 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9622 cmp_type = type0;
9623 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9624 cmp_type = type1;
9625
9626 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9627 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9628
9629 if (unordered_code == UNORDERED_EXPR)
9630 {
9631 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9632 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9633 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9634 }
9635
9636 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9637 : ordered_code;
9638 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9639 fold_build2_loc (loc, code, type, arg0, arg1));
9640 }
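
/* As an illustration, __builtin_isgreater (x, y) arrives above with
   UNLE_EXPR/LE_EXPR and is rewritten as !(x unle y) when the mode
   honors NaNs, or as !(x <= y) otherwise; either way the resulting
   comparison does not signal on quiet NaN operands, unlike a raw >.  */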
9641
9642 /* Fold a call to built-in function FNDECL with 0 arguments.
9643 IGNORE is true if the result of the function call is ignored. This
9644 function returns NULL_TREE if no simplification was possible. */
9645
9646 static tree
9647 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9648 {
9649 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9650 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9651 switch (fcode)
9652 {
9653 CASE_FLT_FN (BUILT_IN_INF):
9654 case BUILT_IN_INFD32:
9655 case BUILT_IN_INFD64:
9656 case BUILT_IN_INFD128:
9657 return fold_builtin_inf (loc, type, true);
9658
9659 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9660 return fold_builtin_inf (loc, type, false);
9661
9662 case BUILT_IN_CLASSIFY_TYPE:
9663 return fold_builtin_classify_type (NULL_TREE);
9664
9665 case BUILT_IN_UNREACHABLE:
9666 if (flag_sanitize & SANITIZE_UNREACHABLE
9667 && (current_function_decl == NULL
9668 || !lookup_attribute ("no_sanitize_undefined",
9669 DECL_ATTRIBUTES (current_function_decl))))
9670 return ubsan_instrument_unreachable (loc);
9671 break;
9672
9673 default:
9674 break;
9675 }
9676 return NULL_TREE;
9677 }
9678
9679 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9680 IGNORE is true if the result of the function call is ignored. This
9681 function returns NULL_TREE if no simplification was possible. */
9682
9683 static tree
9684 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9685 {
9686 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9687 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9688 switch (fcode)
9689 {
9690 case BUILT_IN_CONSTANT_P:
9691 {
9692 tree val = fold_builtin_constant_p (arg0);
9693
9694 /* Gimplification will pull the CALL_EXPR for the builtin out of
9695 an if condition. When not optimizing, we'll not CSE it back.
9696 To avoid regressions such as link errors, return false now. */
9697 if (!val && !optimize)
9698 val = integer_zero_node;
9699
9700 return val;
9701 }
9702
9703 case BUILT_IN_CLASSIFY_TYPE:
9704 return fold_builtin_classify_type (arg0);
9705
9706 case BUILT_IN_STRLEN:
9707 return fold_builtin_strlen (loc, type, arg0);
9708
9709 CASE_FLT_FN (BUILT_IN_FABS):
9710 case BUILT_IN_FABSD32:
9711 case BUILT_IN_FABSD64:
9712 case BUILT_IN_FABSD128:
9713 return fold_builtin_fabs (loc, arg0, type);
9714
9715 case BUILT_IN_ABS:
9716 case BUILT_IN_LABS:
9717 case BUILT_IN_LLABS:
9718 case BUILT_IN_IMAXABS:
9719 return fold_builtin_abs (loc, arg0, type);
9720
9721 CASE_FLT_FN (BUILT_IN_CONJ):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9725 break;
9726
9727 CASE_FLT_FN (BUILT_IN_CREAL):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9731 break;
9732
9733 CASE_FLT_FN (BUILT_IN_CIMAG):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9737 break;
9738
9739 CASE_FLT_FN (BUILT_IN_CCOS):
9740 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9741
9742 CASE_FLT_FN (BUILT_IN_CCOSH):
9743 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9744
9745 CASE_FLT_FN (BUILT_IN_CPROJ):
9746 return fold_builtin_cproj (loc, arg0, type);
9747
9748 CASE_FLT_FN (BUILT_IN_CSIN):
9749 if (validate_arg (arg0, COMPLEX_TYPE)
9750 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9751 return do_mpc_arg1 (arg0, type, mpc_sin);
9752 break;
9753
9754 CASE_FLT_FN (BUILT_IN_CSINH):
9755 if (validate_arg (arg0, COMPLEX_TYPE)
9756 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9757 return do_mpc_arg1 (arg0, type, mpc_sinh);
9758 break;
9759
9760 CASE_FLT_FN (BUILT_IN_CTAN):
9761 if (validate_arg (arg0, COMPLEX_TYPE)
9762 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9763 return do_mpc_arg1 (arg0, type, mpc_tan);
9764 break;
9765
9766 CASE_FLT_FN (BUILT_IN_CTANH):
9767 if (validate_arg (arg0, COMPLEX_TYPE)
9768 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9769 return do_mpc_arg1 (arg0, type, mpc_tanh);
9770 break;
9771
9772 CASE_FLT_FN (BUILT_IN_CLOG):
9773 if (validate_arg (arg0, COMPLEX_TYPE)
9774 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9775 return do_mpc_arg1 (arg0, type, mpc_log);
9776 break;
9777
9778 CASE_FLT_FN (BUILT_IN_CSQRT):
9779 if (validate_arg (arg0, COMPLEX_TYPE)
9780 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9781 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9782 break;
9783
9784 CASE_FLT_FN (BUILT_IN_CASIN):
9785 if (validate_arg (arg0, COMPLEX_TYPE)
9786 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9787 return do_mpc_arg1 (arg0, type, mpc_asin);
9788 break;
9789
9790 CASE_FLT_FN (BUILT_IN_CACOS):
9791 if (validate_arg (arg0, COMPLEX_TYPE)
9792 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9793 return do_mpc_arg1 (arg0, type, mpc_acos);
9794 break;
9795
9796 CASE_FLT_FN (BUILT_IN_CATAN):
9797 if (validate_arg (arg0, COMPLEX_TYPE)
9798 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9799 return do_mpc_arg1 (arg0, type, mpc_atan);
9800 break;
9801
9802 CASE_FLT_FN (BUILT_IN_CASINH):
9803 if (validate_arg (arg0, COMPLEX_TYPE)
9804 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9805 return do_mpc_arg1 (arg0, type, mpc_asinh);
9806 break;
9807
9808 CASE_FLT_FN (BUILT_IN_CACOSH):
9809 if (validate_arg (arg0, COMPLEX_TYPE)
9810 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9811 return do_mpc_arg1 (arg0, type, mpc_acosh);
9812 break;
9813
9814 CASE_FLT_FN (BUILT_IN_CATANH):
9815 if (validate_arg (arg0, COMPLEX_TYPE)
9816 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9817 return do_mpc_arg1 (arg0, type, mpc_atanh);
9818 break;
9819
9820 CASE_FLT_FN (BUILT_IN_CABS):
9821 return fold_builtin_cabs (loc, arg0, type, fndecl);
9822
9823 CASE_FLT_FN (BUILT_IN_CARG):
9824 return fold_builtin_carg (loc, arg0, type);
9825
9826 CASE_FLT_FN (BUILT_IN_SQRT):
9827 return fold_builtin_sqrt (loc, arg0, type);
9828
9829 CASE_FLT_FN (BUILT_IN_CBRT):
9830 return fold_builtin_cbrt (loc, arg0, type);
9831
9832 CASE_FLT_FN (BUILT_IN_ASIN):
9833 if (validate_arg (arg0, REAL_TYPE))
9834 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9835 &dconstm1, &dconst1, true);
9836 break;
9837
9838 CASE_FLT_FN (BUILT_IN_ACOS):
9839 if (validate_arg (arg0, REAL_TYPE))
9840 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9841 &dconstm1, &dconst1, true);
9842 break;
9843
9844 CASE_FLT_FN (BUILT_IN_ATAN):
9845 if (validate_arg (arg0, REAL_TYPE))
9846 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9847 break;
9848
9849 CASE_FLT_FN (BUILT_IN_ASINH):
9850 if (validate_arg (arg0, REAL_TYPE))
9851 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9852 break;
9853
9854 CASE_FLT_FN (BUILT_IN_ACOSH):
9855 if (validate_arg (arg0, REAL_TYPE))
9856 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9857 &dconst1, NULL, true);
9858 break;
9859
9860 CASE_FLT_FN (BUILT_IN_ATANH):
9861 if (validate_arg (arg0, REAL_TYPE))
9862 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9863 &dconstm1, &dconst1, false);
9864 break;
9865
9866 CASE_FLT_FN (BUILT_IN_SIN):
9867 if (validate_arg (arg0, REAL_TYPE))
9868 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9869 break;
9870
9871 CASE_FLT_FN (BUILT_IN_COS):
9872 return fold_builtin_cos (loc, arg0, type, fndecl);
9873
9874 CASE_FLT_FN (BUILT_IN_TAN):
9875 return fold_builtin_tan (arg0, type);
9876
9877 CASE_FLT_FN (BUILT_IN_CEXP):
9878 return fold_builtin_cexp (loc, arg0, type);
9879
9880 CASE_FLT_FN (BUILT_IN_CEXPI):
9881 if (validate_arg (arg0, REAL_TYPE))
9882 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9883 break;
9884
9885 CASE_FLT_FN (BUILT_IN_SINH):
9886 if (validate_arg (arg0, REAL_TYPE))
9887 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9888 break;
9889
9890 CASE_FLT_FN (BUILT_IN_COSH):
9891 return fold_builtin_cosh (loc, arg0, type, fndecl);
9892
9893 CASE_FLT_FN (BUILT_IN_TANH):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9896 break;
9897
9898 CASE_FLT_FN (BUILT_IN_ERF):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9901 break;
9902
9903 CASE_FLT_FN (BUILT_IN_ERFC):
9904 if (validate_arg (arg0, REAL_TYPE))
9905 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9906 break;
9907
9908 CASE_FLT_FN (BUILT_IN_TGAMMA):
9909 if (validate_arg (arg0, REAL_TYPE))
9910 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9911 break;
9912
9913 CASE_FLT_FN (BUILT_IN_EXP):
9914 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9915
9916 CASE_FLT_FN (BUILT_IN_EXP2):
9917 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9918
9919 CASE_FLT_FN (BUILT_IN_EXP10):
9920 CASE_FLT_FN (BUILT_IN_POW10):
9921 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9922
9923 CASE_FLT_FN (BUILT_IN_EXPM1):
9924 if (validate_arg (arg0, REAL_TYPE))
9925 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_LOG):
9929 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9930
9931 CASE_FLT_FN (BUILT_IN_LOG2):
9932 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9933
9934 CASE_FLT_FN (BUILT_IN_LOG10):
9935 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9936
9937 CASE_FLT_FN (BUILT_IN_LOG1P):
9938 if (validate_arg (arg0, REAL_TYPE))
9939 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9940 &dconstm1, NULL, false);
9941 break;
9942
9943 CASE_FLT_FN (BUILT_IN_J0):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9946 NULL, NULL, 0);
9947 break;
9948
9949 CASE_FLT_FN (BUILT_IN_J1):
9950 if (validate_arg (arg0, REAL_TYPE))
9951 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9952 NULL, NULL, 0);
9953 break;
9954
9955 CASE_FLT_FN (BUILT_IN_Y0):
9956 if (validate_arg (arg0, REAL_TYPE))
9957 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9958 &dconst0, NULL, false);
9959 break;
9960
9961 CASE_FLT_FN (BUILT_IN_Y1):
9962 if (validate_arg (arg0, REAL_TYPE))
9963 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9964 &dconst0, NULL, false);
9965 break;
9966
9967 CASE_FLT_FN (BUILT_IN_NAN):
9968 case BUILT_IN_NAND32:
9969 case BUILT_IN_NAND64:
9970 case BUILT_IN_NAND128:
9971 return fold_builtin_nan (arg0, type, true);
9972
9973 CASE_FLT_FN (BUILT_IN_NANS):
9974 return fold_builtin_nan (arg0, type, false);
9975
9976 CASE_FLT_FN (BUILT_IN_FLOOR):
9977 return fold_builtin_floor (loc, fndecl, arg0);
9978
9979 CASE_FLT_FN (BUILT_IN_CEIL):
9980 return fold_builtin_ceil (loc, fndecl, arg0);
9981
9982 CASE_FLT_FN (BUILT_IN_TRUNC):
9983 return fold_builtin_trunc (loc, fndecl, arg0);
9984
9985 CASE_FLT_FN (BUILT_IN_ROUND):
9986 return fold_builtin_round (loc, fndecl, arg0);
9987
9988 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9989 CASE_FLT_FN (BUILT_IN_RINT):
9990 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9991
9992 CASE_FLT_FN (BUILT_IN_ICEIL):
9993 CASE_FLT_FN (BUILT_IN_LCEIL):
9994 CASE_FLT_FN (BUILT_IN_LLCEIL):
9995 CASE_FLT_FN (BUILT_IN_LFLOOR):
9996 CASE_FLT_FN (BUILT_IN_IFLOOR):
9997 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9998 CASE_FLT_FN (BUILT_IN_IROUND):
9999 CASE_FLT_FN (BUILT_IN_LROUND):
10000 CASE_FLT_FN (BUILT_IN_LLROUND):
10001 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10002
10003 CASE_FLT_FN (BUILT_IN_IRINT):
10004 CASE_FLT_FN (BUILT_IN_LRINT):
10005 CASE_FLT_FN (BUILT_IN_LLRINT):
10006 return fold_fixed_mathfn (loc, fndecl, arg0);
10007
10008 case BUILT_IN_BSWAP16:
10009 case BUILT_IN_BSWAP32:
10010 case BUILT_IN_BSWAP64:
10011 return fold_builtin_bswap (fndecl, arg0);
10012
10013 CASE_INT_FN (BUILT_IN_FFS):
10014 CASE_INT_FN (BUILT_IN_CLZ):
10015 CASE_INT_FN (BUILT_IN_CTZ):
10016 CASE_INT_FN (BUILT_IN_CLRSB):
10017 CASE_INT_FN (BUILT_IN_POPCOUNT):
10018 CASE_INT_FN (BUILT_IN_PARITY):
10019 return fold_builtin_bitop (fndecl, arg0);
10020
10021 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10022 return fold_builtin_signbit (loc, arg0, type);
10023
10024 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10025 return fold_builtin_significand (loc, arg0, type);
10026
10027 CASE_FLT_FN (BUILT_IN_ILOGB):
10028 CASE_FLT_FN (BUILT_IN_LOGB):
10029 return fold_builtin_logb (loc, arg0, type);
10030
10031 case BUILT_IN_ISASCII:
10032 return fold_builtin_isascii (loc, arg0);
10033
10034 case BUILT_IN_TOASCII:
10035 return fold_builtin_toascii (loc, arg0);
10036
10037 case BUILT_IN_ISDIGIT:
10038 return fold_builtin_isdigit (loc, arg0);
10039
10040 CASE_FLT_FN (BUILT_IN_FINITE):
10041 case BUILT_IN_FINITED32:
10042 case BUILT_IN_FINITED64:
10043 case BUILT_IN_FINITED128:
10044 case BUILT_IN_ISFINITE:
10045 {
10046 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10047 if (ret)
10048 return ret;
10049 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10050 }
10051
10052 CASE_FLT_FN (BUILT_IN_ISINF):
10053 case BUILT_IN_ISINFD32:
10054 case BUILT_IN_ISINFD64:
10055 case BUILT_IN_ISINFD128:
10056 {
10057 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10058 if (ret)
10059 return ret;
10060 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10061 }
10062
10063 case BUILT_IN_ISNORMAL:
10064 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10065
10066 case BUILT_IN_ISINF_SIGN:
10067 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10068
10069 CASE_FLT_FN (BUILT_IN_ISNAN):
10070 case BUILT_IN_ISNAND32:
10071 case BUILT_IN_ISNAND64:
10072 case BUILT_IN_ISNAND128:
10073 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10074
10075 case BUILT_IN_PRINTF:
10076 case BUILT_IN_PRINTF_UNLOCKED:
10077 case BUILT_IN_VPRINTF:
10078 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10079
10080 case BUILT_IN_FREE:
10081 if (integer_zerop (arg0))
10082 return build_empty_stmt (loc);
10083 break;
10084
10085 default:
10086 break;
10087 }
10088
10089 return NULL_TREE;
10090
10091 }
10092
10093 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10094 IGNORE is true if the result of the function call is ignored. This
10095 function returns NULL_TREE if no simplification was possible. */
10096
10097 static tree
10098 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10099 {
10100 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10102
10103 switch (fcode)
10104 {
10105 CASE_FLT_FN (BUILT_IN_JN):
10106 if (validate_arg (arg0, INTEGER_TYPE)
10107 && validate_arg (arg1, REAL_TYPE))
10108 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_YN):
10112 if (validate_arg (arg0, INTEGER_TYPE)
10113 && validate_arg (arg1, REAL_TYPE))
10114 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10115 &dconst0, false);
10116 break;
10117
10118 CASE_FLT_FN (BUILT_IN_DREM):
10119 CASE_FLT_FN (BUILT_IN_REMAINDER):
10120 if (validate_arg (arg0, REAL_TYPE)
10121 && validate_arg (arg1, REAL_TYPE))
10122 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10123 break;
10124
10125 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10126 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10127 if (validate_arg (arg0, REAL_TYPE)
10128 && validate_arg (arg1, POINTER_TYPE))
10129 return do_mpfr_lgamma_r (arg0, arg1, type);
10130 break;
10131
10132 CASE_FLT_FN (BUILT_IN_ATAN2):
10133 if (validate_arg (arg0, REAL_TYPE)
10134 && validate_arg (arg1, REAL_TYPE))
10135 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10136 break;
10137
10138 CASE_FLT_FN (BUILT_IN_FDIM):
10139 if (validate_arg (arg0, REAL_TYPE)
10140 && validate_arg (arg1, REAL_TYPE))
10141 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10142 break;
10143
10144 CASE_FLT_FN (BUILT_IN_HYPOT):
10145 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10146
10147 CASE_FLT_FN (BUILT_IN_CPOW):
10148 if (validate_arg (arg0, COMPLEX_TYPE)
10149 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10150 && validate_arg (arg1, COMPLEX_TYPE)
10151 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10152 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10153 break;
10154
10155 CASE_FLT_FN (BUILT_IN_LDEXP):
10156 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10157 CASE_FLT_FN (BUILT_IN_SCALBN):
10158 CASE_FLT_FN (BUILT_IN_SCALBLN):
10159 return fold_builtin_load_exponent (loc, arg0, arg1,
10160 type, /*ldexp=*/false);
10161
10162 CASE_FLT_FN (BUILT_IN_FREXP):
10163 return fold_builtin_frexp (loc, arg0, arg1, type);
10164
10165 CASE_FLT_FN (BUILT_IN_MODF):
10166 return fold_builtin_modf (loc, arg0, arg1, type);
10167
10168 case BUILT_IN_STRSTR:
10169 return fold_builtin_strstr (loc, arg0, arg1, type);
10170
10171 case BUILT_IN_STRSPN:
10172 return fold_builtin_strspn (loc, arg0, arg1);
10173
10174 case BUILT_IN_STRCSPN:
10175 return fold_builtin_strcspn (loc, arg0, arg1);
10176
10177 case BUILT_IN_STRCHR:
10178 case BUILT_IN_INDEX:
10179 return fold_builtin_strchr (loc, arg0, arg1, type);
10180
10181 case BUILT_IN_STRRCHR:
10182 case BUILT_IN_RINDEX:
10183 return fold_builtin_strrchr (loc, arg0, arg1, type);
10184
10185 case BUILT_IN_STPCPY:
10186 if (ignore)
10187 {
10188 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10189 if (!fn)
10190 break;
10191
10192 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10193 }
10194 else
10195 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10196 break;
10197
10198 case BUILT_IN_STRCMP:
10199 return fold_builtin_strcmp (loc, arg0, arg1);
10200
10201 case BUILT_IN_STRPBRK:
10202 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10203
10204 case BUILT_IN_EXPECT:
10205 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10206
10207 CASE_FLT_FN (BUILT_IN_POW):
10208 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10209
10210 CASE_FLT_FN (BUILT_IN_POWI):
10211 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10212
10213 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10214 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10215
10216 CASE_FLT_FN (BUILT_IN_FMIN):
10217 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10218
10219 CASE_FLT_FN (BUILT_IN_FMAX):
10220 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10221
10222 case BUILT_IN_ISGREATER:
10223 return fold_builtin_unordered_cmp (loc, fndecl,
10224 arg0, arg1, UNLE_EXPR, LE_EXPR);
10225 case BUILT_IN_ISGREATEREQUAL:
10226 return fold_builtin_unordered_cmp (loc, fndecl,
10227 arg0, arg1, UNLT_EXPR, LT_EXPR);
10228 case BUILT_IN_ISLESS:
10229 return fold_builtin_unordered_cmp (loc, fndecl,
10230 arg0, arg1, UNGE_EXPR, GE_EXPR);
10231 case BUILT_IN_ISLESSEQUAL:
10232 return fold_builtin_unordered_cmp (loc, fndecl,
10233 arg0, arg1, UNGT_EXPR, GT_EXPR);
10234 case BUILT_IN_ISLESSGREATER:
10235 return fold_builtin_unordered_cmp (loc, fndecl,
10236 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10237 case BUILT_IN_ISUNORDERED:
10238 return fold_builtin_unordered_cmp (loc, fndecl,
10239 arg0, arg1, UNORDERED_EXPR,
10240 NOP_EXPR);
10241
10242 /* We do the folding for va_start in the expander. */
10243 case BUILT_IN_VA_START:
10244 break;
10245
10246 case BUILT_IN_OBJECT_SIZE:
10247 return fold_builtin_object_size (arg0, arg1);
10248
10249 case BUILT_IN_PRINTF:
10250 case BUILT_IN_PRINTF_UNLOCKED:
10251 case BUILT_IN_VPRINTF:
10252 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10253
10254 case BUILT_IN_PRINTF_CHK:
10255 case BUILT_IN_VPRINTF_CHK:
10256 if (!validate_arg (arg0, INTEGER_TYPE)
10257 || TREE_SIDE_EFFECTS (arg0))
10258 return NULL_TREE;
10259 else
10260 return fold_builtin_printf (loc, fndecl,
10261 arg1, NULL_TREE, ignore, fcode);
10262 break;
10263
10264 case BUILT_IN_FPRINTF:
10265 case BUILT_IN_FPRINTF_UNLOCKED:
10266 case BUILT_IN_VFPRINTF:
10267 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10268 ignore, fcode);
10269
10270 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10271 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10272
10273 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10274 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10275
10276 default:
10277 break;
10278 }
10279 return NULL_TREE;
10280 }
10281
10282 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10283 and ARG2. IGNORE is true if the result of the function call is ignored.
10284 This function returns NULL_TREE if no simplification was possible. */
10285
10286 static tree
10287 fold_builtin_3 (location_t loc, tree fndecl,
10288 tree arg0, tree arg1, tree arg2, bool ignore)
10289 {
10290 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10292 switch (fcode)
10293 {
10294
10295 CASE_FLT_FN (BUILT_IN_SINCOS):
10296 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10297
10298 CASE_FLT_FN (BUILT_IN_FMA):
10299 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10301
10302 CASE_FLT_FN (BUILT_IN_REMQUO):
10303 if (validate_arg (arg0, REAL_TYPE)
10304 && validate_arg (arg1, REAL_TYPE)
10305 && validate_arg (arg2, POINTER_TYPE))
10306 return do_mpfr_remquo (arg0, arg1, arg2);
10307 break;
10308
10309 case BUILT_IN_STRNCAT:
10310 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10311
10312 case BUILT_IN_STRNCMP:
10313 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10314
10315 case BUILT_IN_MEMCHR:
10316 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10317
10318 case BUILT_IN_BCMP:
10319 case BUILT_IN_MEMCMP:
10320 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10321
10322 case BUILT_IN_PRINTF_CHK:
10323 case BUILT_IN_VPRINTF_CHK:
10324 if (!validate_arg (arg0, INTEGER_TYPE)
10325 || TREE_SIDE_EFFECTS (arg0))
10326 return NULL_TREE;
10327 else
10328 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10329 break;
10330
10331 case BUILT_IN_FPRINTF:
10332 case BUILT_IN_FPRINTF_UNLOCKED:
10333 case BUILT_IN_VFPRINTF:
10334 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10335 ignore, fcode);
10336
10337 case BUILT_IN_FPRINTF_CHK:
10338 case BUILT_IN_VFPRINTF_CHK:
10339 if (!validate_arg (arg1, INTEGER_TYPE)
10340 || TREE_SIDE_EFFECTS (arg1))
10341 return NULL_TREE;
10342 else
10343 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10344 ignore, fcode);
10345
10346 case BUILT_IN_EXPECT:
10347 return fold_builtin_expect (loc, arg0, arg1, arg2);
10348
10349 default:
10350 break;
10351 }
10352 return NULL_TREE;
10353 }
10354
10355 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10356 ARG2, and ARG3. IGNORE is true if the result of the function call is
10357 ignored. This function returns NULL_TREE if no simplification was
10358 possible. */
10359
10360 static tree
10361 fold_builtin_4 (location_t loc, tree fndecl,
10362 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10363 {
10364 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10365
10366 switch (fcode)
10367 {
10368 case BUILT_IN_FPRINTF_CHK:
10369 case BUILT_IN_VFPRINTF_CHK:
10370 if (!validate_arg (arg1, INTEGER_TYPE)
10371 || TREE_SIDE_EFFECTS (arg1))
10372 return NULL_TREE;
10373 else
10374 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10375 ignore, fcode);
10376 break;
10377
10378 default:
10379 break;
10380 }
10381 return NULL_TREE;
10382 }
10383
10384 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10385 arguments, where NARGS <= 4. IGNORE is true if the result of the
10386 function call is ignored. This function returns NULL_TREE if no
10387 simplification was possible. Note that this only folds builtins with
10388 fixed argument patterns. Foldings that do varargs-to-varargs
10389 transformations, or that match calls with more than 4 arguments,
10390 need to be handled with fold_builtin_varargs instead. */
10391
10392 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10393
10394 static tree
10395 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10396 {
10397 tree ret = NULL_TREE;
10398
10399 switch (nargs)
10400 {
10401 case 0:
10402 ret = fold_builtin_0 (loc, fndecl, ignore);
10403 break;
10404 case 1:
10405 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10406 break;
10407 case 2:
10408 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10409 break;
10410 case 3:
10411 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10412 break;
10413 case 4:
10414 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10415 ignore);
10416 break;
10417 default:
10418 break;
10419 }
10420 if (ret)
10421 {
10422 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10423 SET_EXPR_LOCATION (ret, loc);
10424 TREE_NO_WARNING (ret) = 1;
10425 return ret;
10426 }
10427 return NULL_TREE;
10428 }
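
/* For illustration: a successful fold such as strlen ("abc") -> 3 is
   handed back wrapped in a NOP_EXPR with TREE_NO_WARNING set, so that
   a statement like

     strlen ("abc");

   whose result is unused does not draw a "statement with no effect"
   warning once the call itself has been folded away.  */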
10429
10430 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10431 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10432 of arguments in ARGS to be omitted. OLDNARGS is the number of
10433 elements in ARGS. */
10434
10435 static tree
10436 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10437 int skip, tree fndecl, int n, va_list newargs)
10438 {
10439 int nargs = oldnargs - skip + n;
10440 tree *buffer;
10441
10442 if (n > 0)
10443 {
10444 int i, j;
10445
10446 buffer = XALLOCAVEC (tree, nargs);
10447 for (i = 0; i < n; i++)
10448 buffer[i] = va_arg (newargs, tree);
10449 for (j = skip; j < oldnargs; j++, i++)
10450 buffer[i] = args[j];
10451 }
10452 else
10453 buffer = args + skip;
10454
10455 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10456 }
10457
10458 /* Return true if FNDECL shouldn't be folded right now.
10459    If a built-in function has an always_inline inline wrapper,
10460    defer folding it until after always_inline functions have
10461    been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10462 might not be performed. */
10463
10464 bool
10465 avoid_folding_inline_builtin (tree fndecl)
10466 {
10467 return (DECL_DECLARED_INLINE_P (fndecl)
10468 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10469 && cfun
10470 && !cfun->always_inline_functions_inlined
10471 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10472 }
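
/* For example, -D_FORTIFY_SOURCE headers declare always_inline
   wrappers roughly of the form (an illustrative sketch, not the
   exact glibc code)

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before the wrapper is inlined would skip the
   _chk checking, hence the deferral above.  */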
10473
10474 /* A wrapper function for builtin folding that prevents warnings for
10475 "statement without effect" and the like, caused by removing the
10476 call node earlier than the warning is generated. */
10477
10478 tree
10479 fold_call_expr (location_t loc, tree exp, bool ignore)
10480 {
10481 tree ret = NULL_TREE;
10482 tree fndecl = get_callee_fndecl (exp);
10483 if (fndecl
10484 && TREE_CODE (fndecl) == FUNCTION_DECL
10485 && DECL_BUILT_IN (fndecl)
10486 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10487 yet. Defer folding until we see all the arguments
10488 (after inlining). */
10489 && !CALL_EXPR_VA_ARG_PACK (exp))
10490 {
10491 int nargs = call_expr_nargs (exp);
10492
10493 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10494 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
10495 even in that case, until arguments are finalized. */
10496 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10497 {
10498 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10499 if (fndecl2
10500 && TREE_CODE (fndecl2) == FUNCTION_DECL
10501 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10502 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10503 return NULL_TREE;
10504 }
10505
10506 if (avoid_folding_inline_builtin (fndecl))
10507 return NULL_TREE;
10508
10509 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10510 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10511 CALL_EXPR_ARGP (exp), ignore);
10512 else
10513 {
10514 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10515 {
10516 tree *args = CALL_EXPR_ARGP (exp);
10517 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10518 }
10519 if (!ret)
10520 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10521 if (ret)
10522 return ret;
10523 }
10524 }
10525 return NULL_TREE;
10526 }
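
/* The __builtin_va_arg_pack () case deferred above arises from GNU
   forwarding wrappers along the lines of (a sketch based on the GCC
   documentation)

     extern __inline __attribute__ ((__always_inline__)) int
     myprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   where the fprintf call cannot be folded until inlining has replaced
   __builtin_va_arg_pack () with the caller's actual arguments.  */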
10527
10528 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10529 N arguments are passed in the array ARGARRAY. */
10530
10531 tree
10532 fold_builtin_call_array (location_t loc, tree type,
10533 tree fn,
10534 int n,
10535 tree *argarray)
10536 {
10537 tree ret = NULL_TREE;
10538 tree exp;
10539
10540 if (TREE_CODE (fn) == ADDR_EXPR)
10541 {
10542 tree fndecl = TREE_OPERAND (fn, 0);
10543 if (TREE_CODE (fndecl) == FUNCTION_DECL
10544 && DECL_BUILT_IN (fndecl))
10545 {
10546 /* If last argument is __builtin_va_arg_pack (), arguments to this
10547 function are not finalized yet. Defer folding until they are. */
10548 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10549 {
10550 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10551 if (fndecl2
10552 && TREE_CODE (fndecl2) == FUNCTION_DECL
10553 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10554 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10555 return build_call_array_loc (loc, type, fn, n, argarray);
10556 }
10557 if (avoid_folding_inline_builtin (fndecl))
10558 return build_call_array_loc (loc, type, fn, n, argarray);
10559 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10560 {
10561 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10562 if (ret)
10563 return ret;
10564
10565 return build_call_array_loc (loc, type, fn, n, argarray);
10566 }
10567 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10568 {
10569 /* First try the transformations that don't require consing up
10570 an exp. */
10571 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10572 if (ret)
10573 return ret;
10574 }
10575
10576 /* If we got this far, we need to build an exp. */
10577 exp = build_call_array_loc (loc, type, fn, n, argarray);
10578 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10579 return ret ? ret : exp;
10580 }
10581 }
10582
10583 return build_call_array_loc (loc, type, fn, n, argarray);
10584 }
10585
10586 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10587 along with N new arguments specified as the "..." parameters. SKIP
10588 is the number of arguments in EXP to be omitted. This function is used
10589 to do varargs-to-varargs transformations. */
10590
10591 static tree
10592 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10593 {
10594 va_list ap;
10595 tree t;
10596
10597 va_start (ap, n);
10598 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10599 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10600 va_end (ap);
10601
10602 return t;
10603 }
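
/* As a hypothetical example of such a rewrite,

     rewrite_call_expr (loc, exp, 2, fndecl, 1, newarg)

   applied to a call exp = foo (a, b, c, d) drops the first two
   arguments and builds fndecl (newarg, c, d); the names here are
   illustrative only.  */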
10604
10605 /* Validate a single argument ARG against a tree code CODE representing
10606 a type. */
10607
10608 static bool
10609 validate_arg (const_tree arg, enum tree_code code)
10610 {
10611 if (!arg)
10612 return false;
10613 else if (code == POINTER_TYPE)
10614 return POINTER_TYPE_P (TREE_TYPE (arg));
10615 else if (code == INTEGER_TYPE)
10616 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10617 return code == TREE_CODE (TREE_TYPE (arg));
10618 }
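
/* Note that the INTEGER_TYPE and POINTER_TYPE cases above are
   deliberately loose: an argument of boolean or enumeral type
   satisfies validate_arg (arg, INTEGER_TYPE), since INTEGRAL_TYPE_P
   accepts BOOLEAN_TYPE and ENUMERAL_TYPE as well as INTEGER_TYPE.  */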
10619
10620 /* This function validates the types of a function call argument list
10621 against a specified list of tree_codes. If the last specifier is a 0,
10622    that represents an ellipsis; otherwise the last specifier must be a
10623 VOID_TYPE.
10624
10625 This is the GIMPLE version of validate_arglist. Eventually we want to
10626    completely convert builtins.c to work from GIMPLEs and the tree-based
10627 validate_arglist will then be removed. */
10628
10629 bool
10630 validate_gimple_arglist (const_gimple call, ...)
10631 {
10632 enum tree_code code;
10633   bool res = false;
10634 va_list ap;
10635 const_tree arg;
10636 size_t i;
10637
10638 va_start (ap, call);
10639 i = 0;
10640
10641 do
10642 {
10643 code = (enum tree_code) va_arg (ap, int);
10644 switch (code)
10645 {
10646 case 0:
10647 	  /* This signifies an ellipsis; any further arguments are all ok.  */
10648 res = true;
10649 goto end;
10650 case VOID_TYPE:
10651 /* This signifies an endlink, if no arguments remain, return
10652 true, otherwise return false. */
10653 res = (i == gimple_call_num_args (call));
10654 goto end;
10655 default:
10656 /* If no parameters remain or the parameter's code does not
10657 match the specified code, return false. Otherwise continue
10658 checking any remaining arguments. */
10659 arg = gimple_call_arg (call, i++);
10660 if (!validate_arg (arg, code))
10661 goto end;
10662 break;
10663 }
10664 }
10665 while (1);
10666
10667   /* We need gotos here since we can only have one va_end in a
10668 function. */
10669 end: ;
10670 va_end (ap);
10671
10672 return res;
10673 }
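
/* Typical uses (illustrative):

     validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE, VOID_TYPE)

   accepts exactly two floating-point arguments, while

     validate_gimple_arglist (call, POINTER_TYPE, 0)

   accepts a pointer followed by any number of further arguments.  */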
10674
10675 /* Default target-specific builtin expander that does nothing. */
10676
10677 rtx
10678 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10679 rtx target ATTRIBUTE_UNUSED,
10680 rtx subtarget ATTRIBUTE_UNUSED,
10681 enum machine_mode mode ATTRIBUTE_UNUSED,
10682 int ignore ATTRIBUTE_UNUSED)
10683 {
10684 return NULL_RTX;
10685 }
10686
10687 /* Returns true if EXP represents data that would potentially reside
10688 in a readonly section. */
10689
10690 bool
10691 readonly_data_expr (tree exp)
10692 {
10693 STRIP_NOPS (exp);
10694
10695 if (TREE_CODE (exp) != ADDR_EXPR)
10696 return false;
10697
10698 exp = get_base_address (TREE_OPERAND (exp, 0));
10699 if (!exp)
10700 return false;
10701
10702 /* Make sure we call decl_readonly_section only for trees it
10703 can handle (since it returns true for everything it doesn't
10704 understand). */
10705 if (TREE_CODE (exp) == STRING_CST
10706 || TREE_CODE (exp) == CONSTRUCTOR
10707 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10708 return decl_readonly_section (exp, 0);
10709 else
10710 return false;
10711 }
10712
10713 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10714 to the call, and TYPE is its return type.
10715
10716 Return NULL_TREE if no simplification was possible, otherwise return the
10717 simplified form of the call as a tree.
10718
10719 The simplified form may be a constant or other expression which
10720 computes the same value, but in a more efficient manner (including
10721 calls to other builtin functions).
10722
10723 The call may contain arguments which need to be evaluated, but
10724 which are not useful to determine the result of the call. In
10725 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10726 COMPOUND_EXPR will be an argument which must be evaluated.
10727 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10728 COMPOUND_EXPR in the chain will contain the tree for the simplified
10729 form of the builtin function call. */
10730
10731 static tree
10732 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10733 {
10734 if (!validate_arg (s1, POINTER_TYPE)
10735 || !validate_arg (s2, POINTER_TYPE))
10736 return NULL_TREE;
10737 else
10738 {
10739 tree fn;
10740 const char *p1, *p2;
10741
10742 p2 = c_getstr (s2);
10743 if (p2 == NULL)
10744 return NULL_TREE;
10745
10746 p1 = c_getstr (s1);
10747 if (p1 != NULL)
10748 {
10749 const char *r = strstr (p1, p2);
10750 tree tem;
10751
10752 if (r == NULL)
10753 return build_int_cst (TREE_TYPE (s1), 0);
10754
10755 /* Return an offset into the constant string argument. */
10756 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10757 return fold_convert_loc (loc, type, tem);
10758 }
10759
10760 /* The argument is const char *, and the result is char *, so we need
10761 a type conversion here to avoid a warning. */
10762 if (p2[0] == '\0')
10763 return fold_convert_loc (loc, type, s1);
10764
10765 if (p2[1] != '\0')
10766 return NULL_TREE;
10767
10768 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10769 if (!fn)
10770 return NULL_TREE;
10771
10772 /* New argument list transforming strstr(s1, s2) to
10773 strchr(s1, s2[0]). */
10774 return build_call_expr_loc (loc, fn, 2, s1,
10775 build_int_cst (integer_type_node, p2[0]));
10776 }
10777 }
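
/* Sketch of the folds performed above, in source terms:

     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')
     strstr ("hello", "lo")  ->  "hello" + 3
     strstr ("hello", "xy")  ->  NULL

   with the usual conversions applied to match the return type.  */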
10778
10779 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10780 the call, and TYPE is its return type.
10781
10782 Return NULL_TREE if no simplification was possible, otherwise return the
10783 simplified form of the call as a tree.
10784
10785 The simplified form may be a constant or other expression which
10786 computes the same value, but in a more efficient manner (including
10787 calls to other builtin functions).
10788
10789 The call may contain arguments which need to be evaluated, but
10790 which are not useful to determine the result of the call. In
10791 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10792 COMPOUND_EXPR will be an argument which must be evaluated.
10793 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10794 COMPOUND_EXPR in the chain will contain the tree for the simplified
10795 form of the builtin function call. */
10796
10797 static tree
10798 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10799 {
10800 if (!validate_arg (s1, POINTER_TYPE)
10801 || !validate_arg (s2, INTEGER_TYPE))
10802 return NULL_TREE;
10803 else
10804 {
10805 const char *p1;
10806
10807 if (TREE_CODE (s2) != INTEGER_CST)
10808 return NULL_TREE;
10809
10810 p1 = c_getstr (s1);
10811 if (p1 != NULL)
10812 {
10813 char c;
10814 const char *r;
10815 tree tem;
10816
10817 if (target_char_cast (s2, &c))
10818 return NULL_TREE;
10819
10820 r = strchr (p1, c);
10821
10822 if (r == NULL)
10823 return build_int_cst (TREE_TYPE (s1), 0);
10824
10825 /* Return an offset into the constant string argument. */
10826 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10827 return fold_convert_loc (loc, type, tem);
10828 }
10829 return NULL_TREE;
10830 }
10831 }
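
/* In source terms the fold above gives, for example:

     strchr ("hello", 'l')  ->  "hello" + 2
     strchr ("hello", 'z')  ->  NULL

   Nothing is done unless the first argument is a string constant.  */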
10832
10833 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10834 the call, and TYPE is its return type.
10835
10836 Return NULL_TREE if no simplification was possible, otherwise return the
10837 simplified form of the call as a tree.
10838
10839 The simplified form may be a constant or other expression which
10840 computes the same value, but in a more efficient manner (including
10841 calls to other builtin functions).
10842
10843 The call may contain arguments which need to be evaluated, but
10844 which are not useful to determine the result of the call. In
10845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10846 COMPOUND_EXPR will be an argument which must be evaluated.
10847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10848 COMPOUND_EXPR in the chain will contain the tree for the simplified
10849 form of the builtin function call. */
10850
10851 static tree
10852 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10853 {
10854 if (!validate_arg (s1, POINTER_TYPE)
10855 || !validate_arg (s2, INTEGER_TYPE))
10856 return NULL_TREE;
10857 else
10858 {
10859 tree fn;
10860 const char *p1;
10861
10862 if (TREE_CODE (s2) != INTEGER_CST)
10863 return NULL_TREE;
10864
10865 p1 = c_getstr (s1);
10866 if (p1 != NULL)
10867 {
10868 char c;
10869 const char *r;
10870 tree tem;
10871
10872 if (target_char_cast (s2, &c))
10873 return NULL_TREE;
10874
10875 r = strrchr (p1, c);
10876
10877 if (r == NULL)
10878 return build_int_cst (TREE_TYPE (s1), 0);
10879
10880 /* Return an offset into the constant string argument. */
10881 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10882 return fold_convert_loc (loc, type, tem);
10883 }
10884
10885 if (! integer_zerop (s2))
10886 return NULL_TREE;
10887
10888 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10889 if (!fn)
10890 return NULL_TREE;
10891
10892 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10893 return build_call_expr_loc (loc, fn, 2, s1, s2);
10894 }
10895 }
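
/* In source terms, for example:

     strrchr ("hello", 'l')  ->  "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')

   The second form is safe because both functions return a pointer to
   the terminating NUL when searching for '\0'.  */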
10896
10897 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10898 to the call, and TYPE is its return type.
10899
10900 Return NULL_TREE if no simplification was possible, otherwise return the
10901 simplified form of the call as a tree.
10902
10903 The simplified form may be a constant or other expression which
10904 computes the same value, but in a more efficient manner (including
10905 calls to other builtin functions).
10906
10907 The call may contain arguments which need to be evaluated, but
10908 which are not useful to determine the result of the call. In
10909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10910 COMPOUND_EXPR will be an argument which must be evaluated.
10911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10912 COMPOUND_EXPR in the chain will contain the tree for the simplified
10913 form of the builtin function call. */
10914
10915 static tree
10916 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10917 {
10918 if (!validate_arg (s1, POINTER_TYPE)
10919 || !validate_arg (s2, POINTER_TYPE))
10920 return NULL_TREE;
10921 else
10922 {
10923 tree fn;
10924 const char *p1, *p2;
10925
10926 p2 = c_getstr (s2);
10927 if (p2 == NULL)
10928 return NULL_TREE;
10929
10930 p1 = c_getstr (s1);
10931 if (p1 != NULL)
10932 {
10933 const char *r = strpbrk (p1, p2);
10934 tree tem;
10935
10936 if (r == NULL)
10937 return build_int_cst (TREE_TYPE (s1), 0);
10938
10939 /* Return an offset into the constant string argument. */
10940 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10941 return fold_convert_loc (loc, type, tem);
10942 }
10943
10944 if (p2[0] == '\0')
10945 /* strpbrk(x, "") == NULL.
10946 Evaluate and ignore s1 in case it had side-effects. */
10947 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10948
10949 if (p2[1] != '\0')
10950 return NULL_TREE; /* Really call strpbrk. */
10951
10952 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10953 if (!fn)
10954 return NULL_TREE;
10955
10956 /* New argument list transforming strpbrk(s1, s2) to
10957 strchr(s1, s2[0]). */
10958 return build_call_expr_loc (loc, fn, 2, s1,
10959 build_int_cst (integer_type_node, p2[0]));
10960 }
10961 }
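
/* In source terms, for example:

     strpbrk ("hello", "lo")  ->  "hello" + 2
     strpbrk (x, "")          ->  NULL, with x still evaluated
     strpbrk (s, "c")         ->  strchr (s, 'c')  */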
10962
10963 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10964 arguments to the call.
10965
10966 Return NULL_TREE if no simplification was possible, otherwise return the
10967 simplified form of the call as a tree.
10968
10969 The simplified form may be a constant or other expression which
10970 computes the same value, but in a more efficient manner (including
10971 calls to other builtin functions).
10972
10973 The call may contain arguments which need to be evaluated, but
10974 which are not useful to determine the result of the call. In
10975 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10976 COMPOUND_EXPR will be an argument which must be evaluated.
10977 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10978 COMPOUND_EXPR in the chain will contain the tree for the simplified
10979 form of the builtin function call. */
10980
10981 static tree
10982 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10983 {
10984 if (!validate_arg (dst, POINTER_TYPE)
10985 || !validate_arg (src, POINTER_TYPE)
10986 || !validate_arg (len, INTEGER_TYPE))
10987 return NULL_TREE;
10988 else
10989 {
10990 const char *p = c_getstr (src);
10991
10992 /* If the requested length is zero, or the src parameter string
10993 length is zero, return the dst parameter. */
10994 if (integer_zerop (len) || (p && *p == '\0'))
10995 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
10996
10997 /* If the requested len is greater than or equal to the string
10998 length, call strcat. */
10999 if (TREE_CODE (len) == INTEGER_CST && p
11000 && compare_tree_int (len, strlen (p)) >= 0)
11001 {
11002 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11003
11004 /* If the replacement _DECL isn't initialized, don't do the
11005 transformation. */
11006 if (!fn)
11007 return NULL_TREE;
11008
11009 return build_call_expr_loc (loc, fn, 2, dst, src);
11010 }
11011 return NULL_TREE;
11012 }
11013 }
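
/* In source terms, for example:

     strncat (dst, src, 0)   ->  dst, with src and the length still
				 evaluated for side-effects
     strncat (dst, "ab", 5)  ->  strcat (dst, "ab")

   the latter because copying at most 5 characters of a 2-character
   string is the same as copying all of it.  */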
11014
11015 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11016 to the call.
11017
11018 Return NULL_TREE if no simplification was possible, otherwise return the
11019 simplified form of the call as a tree.
11020
11021 The simplified form may be a constant or other expression which
11022 computes the same value, but in a more efficient manner (including
11023 calls to other builtin functions).
11024
11025 The call may contain arguments which need to be evaluated, but
11026 which are not useful to determine the result of the call. In
11027 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11028 COMPOUND_EXPR will be an argument which must be evaluated.
11029 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11030 COMPOUND_EXPR in the chain will contain the tree for the simplified
11031 form of the builtin function call. */
11032
11033 static tree
11034 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11035 {
11036 if (!validate_arg (s1, POINTER_TYPE)
11037 || !validate_arg (s2, POINTER_TYPE))
11038 return NULL_TREE;
11039 else
11040 {
11041 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11042
11043 /* If both arguments are constants, evaluate at compile-time. */
11044 if (p1 && p2)
11045 {
11046 const size_t r = strspn (p1, p2);
11047 return build_int_cst (size_type_node, r);
11048 }
11049
11050       /* If either argument is "", the result is 0.  */
11051 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11052 /* Evaluate and ignore both arguments in case either one has
11053 side-effects. */
11054 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11055 s1, s2);
11056 return NULL_TREE;
11057 }
11058 }
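
/* In source terms, for example:

     strspn ("abcba", "ab")  ->  2
     strspn (s, "")          ->  0, with s still evaluated

   The constant case simply runs the host strspn at compile time.  */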
11059
11060 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11061 to the call.
11062
11063 Return NULL_TREE if no simplification was possible, otherwise return the
11064 simplified form of the call as a tree.
11065
11066 The simplified form may be a constant or other expression which
11067 computes the same value, but in a more efficient manner (including
11068 calls to other builtin functions).
11069
11070 The call may contain arguments which need to be evaluated, but
11071 which are not useful to determine the result of the call. In
11072 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11073 COMPOUND_EXPR will be an argument which must be evaluated.
11074 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11075 COMPOUND_EXPR in the chain will contain the tree for the simplified
11076 form of the builtin function call. */
11077
11078 static tree
11079 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11080 {
11081 if (!validate_arg (s1, POINTER_TYPE)
11082 || !validate_arg (s2, POINTER_TYPE))
11083 return NULL_TREE;
11084 else
11085 {
11086 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11087
11088 /* If both arguments are constants, evaluate at compile-time. */
11089 if (p1 && p2)
11090 {
11091 const size_t r = strcspn (p1, p2);
11092 return build_int_cst (size_type_node, r);
11093 }
11094
11095       /* If the first argument is "", the result is 0.  */
11096 if (p1 && *p1 == '\0')
11097 {
11098 /* Evaluate and ignore argument s2 in case it has
11099 side-effects. */
11100 return omit_one_operand_loc (loc, size_type_node,
11101 size_zero_node, s2);
11102 }
11103
11104 /* If the second argument is "", return __builtin_strlen(s1). */
11105 if (p2 && *p2 == '\0')
11106 {
11107 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11108
11109 /* If the replacement _DECL isn't initialized, don't do the
11110 transformation. */
11111 if (!fn)
11112 return NULL_TREE;
11113
11114 return build_call_expr_loc (loc, fn, 1, s1);
11115 }
11116 return NULL_TREE;
11117 }
11118 }
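
/* In source terms, for example:

     strcspn ("abc", "c")  ->  2
     strcspn ("", s)       ->  0, with s still evaluated
     strcspn (s, "")       ->  strlen (s)  */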
11119
11120 /* Fold the next_arg or va_start call EXP.  Returns true if an error
11121    was produced, false otherwise; this way we don't emit the same error
11122    or warning two or three times.  */
11123
11124 bool
11125 fold_builtin_next_arg (tree exp, bool va_start_p)
11126 {
11127 tree fntype = TREE_TYPE (current_function_decl);
11128 int nargs = call_expr_nargs (exp);
11129 tree arg;
11130   /* There is a good chance the current input_location points inside the
11131      definition of the va_start macro (perhaps on the token for the
11132      builtin) in a system header, so warnings will not be emitted.
11133 Use the location in real source code. */
11134 source_location current_location =
11135 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11136 NULL);
11137
11138 if (!stdarg_p (fntype))
11139 {
11140 error ("%<va_start%> used in function with fixed args");
11141 return true;
11142 }
11143
11144 if (va_start_p)
11145 {
11146       if (nargs != 2)
11147 {
11148 error ("wrong number of arguments to function %<va_start%>");
11149 return true;
11150 }
11151 arg = CALL_EXPR_ARG (exp, 1);
11152 }
11153   /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11154      once we have checked the arguments and, if needed, issued a warning.  */
11155 else
11156 {
11157 if (nargs == 0)
11158 {
11159 /* Evidently an out of date version of <stdarg.h>; can't validate
11160 va_start's second argument, but can still work as intended. */
11161 warning_at (current_location,
11162 OPT_Wvarargs,
11163 "%<__builtin_next_arg%> called without an argument");
11164 return true;
11165 }
11166 else if (nargs > 1)
11167 {
11168 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11169 return true;
11170 }
11171 arg = CALL_EXPR_ARG (exp, 0);
11172 }
11173
11174 if (TREE_CODE (arg) == SSA_NAME)
11175 arg = SSA_NAME_VAR (arg);
11176
11177 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11178 or __builtin_next_arg (0) the first time we see it, after checking
11179 the arguments and if needed issuing a warning. */
11180 if (!integer_zerop (arg))
11181 {
11182 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11183
11184 /* Strip off all nops for the sake of the comparison. This
11185 is not quite the same as STRIP_NOPS. It does more.
11186 We must also strip off INDIRECT_EXPR for C++ reference
11187 parameters. */
11188 while (CONVERT_EXPR_P (arg)
11189 || TREE_CODE (arg) == INDIRECT_REF)
11190 arg = TREE_OPERAND (arg, 0);
11191 if (arg != last_parm)
11192 {
11193 	  /* FIXME: Sometimes the tree optimizers can give us something
11194 	     other than the last argument even though the user did use the
11195 	     last argument.  We just warn here, and the generated code may
11196 	     still be wrong because of it.  */
11198 warning_at (current_location,
11199 OPT_Wvarargs,
11200 "second parameter of %<va_start%> not last named argument");
11201 }
11202
11203 /* Undefined by C99 7.15.1.4p4 (va_start):
11204 "If the parameter parmN is declared with the register storage
11205 class, with a function or array type, or with a type that is
11206 not compatible with the type that results after application of
11207 the default argument promotions, the behavior is undefined."
11208 */
11209 else if (DECL_REGISTER (arg))
11210 {
11211 warning_at (current_location,
11212 OPT_Wvarargs,
11213 "undefined behaviour when second parameter of "
11214 "%<va_start%> is declared with %<register%> storage");
11215 }
11216
11217 /* We want to verify the second parameter just once before the tree
11218 optimizers are run and then avoid keeping it in the tree,
11219 as otherwise we could warn even for correct code like:
11220 void foo (int i, ...)
11221 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11222 if (va_start_p)
11223 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11224 else
11225 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11226 }
11227 return false;
11228 }
11229
11230
11231 /* Expand a call EXP to __builtin_object_size. */
11232
11233 static rtx
11234 expand_builtin_object_size (tree exp)
11235 {
11236 tree ost;
11237 int object_size_type;
11238 tree fndecl = get_callee_fndecl (exp);
11239
11240 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11241 {
11242 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11243 exp, fndecl);
11244 expand_builtin_trap ();
11245 return const0_rtx;
11246 }
11247
11248 ost = CALL_EXPR_ARG (exp, 1);
11249 STRIP_NOPS (ost);
11250
11251 if (TREE_CODE (ost) != INTEGER_CST
11252 || tree_int_cst_sgn (ost) < 0
11253 || compare_tree_int (ost, 3) > 0)
11254 {
11255 error ("%Klast argument of %D is not integer constant between 0 and 3",
11256 exp, fndecl);
11257 expand_builtin_trap ();
11258 return const0_rtx;
11259 }
11260
11261 object_size_type = tree_to_shwi (ost);
11262
11263 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11264 }
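
/* In other words, any __builtin_object_size call still around at RTL
   expansion time gets the "unknown" answer: (size_t) -1 for types 0
   and 1, and 0 for types 2 and 3.  */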
11265
11266 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11267 FCODE is the BUILT_IN_* to use.
11268 Return NULL_RTX if we failed; the caller should emit a normal call,
11269 otherwise try to get the result in TARGET, if convenient (and in
11270 mode MODE if that's convenient). */
11271
11272 static rtx
11273 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11274 enum built_in_function fcode)
11275 {
11276 tree dest, src, len, size;
11277
11278 if (!validate_arglist (exp,
11279 POINTER_TYPE,
11280 fcode == BUILT_IN_MEMSET_CHK
11281 ? INTEGER_TYPE : POINTER_TYPE,
11282 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11283 return NULL_RTX;
11284
11285 dest = CALL_EXPR_ARG (exp, 0);
11286 src = CALL_EXPR_ARG (exp, 1);
11287 len = CALL_EXPR_ARG (exp, 2);
11288 size = CALL_EXPR_ARG (exp, 3);
11289
11290 if (! tree_fits_uhwi_p (size))
11291 return NULL_RTX;
11292
11293 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11294 {
11295 tree fn;
11296
11297 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11298 {
11299 warning_at (tree_nonartificial_location (exp),
11300 0, "%Kcall to %D will always overflow destination buffer",
11301 exp, get_callee_fndecl (exp));
11302 return NULL_RTX;
11303 }
11304
11305 fn = NULL_TREE;
11306 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11307 mem{cpy,pcpy,move,set} is available. */
11308 switch (fcode)
11309 {
11310 case BUILT_IN_MEMCPY_CHK:
11311 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11312 break;
11313 case BUILT_IN_MEMPCPY_CHK:
11314 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11315 break;
11316 case BUILT_IN_MEMMOVE_CHK:
11317 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11318 break;
11319 case BUILT_IN_MEMSET_CHK:
11320 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11321 break;
11322 default:
11323 break;
11324 }
11325
11326 if (! fn)
11327 return NULL_RTX;
11328
11329 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11330 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11331 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11332 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11333 }
11334 else if (fcode == BUILT_IN_MEMSET_CHK)
11335 return NULL_RTX;
11336 else
11337 {
11338 unsigned int dest_align = get_pointer_alignment (dest);
11339
11340 /* If DEST is not a pointer type, call the normal function. */
11341 if (dest_align == 0)
11342 return NULL_RTX;
11343
11344 /* If SRC and DEST are the same (and not volatile), do nothing. */
11345 if (operand_equal_p (src, dest, 0))
11346 {
11347 tree expr;
11348
11349 if (fcode != BUILT_IN_MEMPCPY_CHK)
11350 {
11351 /* Evaluate and ignore LEN in case it has side-effects. */
11352 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11353 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11354 }
11355
11356 expr = fold_build_pointer_plus (dest, len);
11357 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11358 }
11359
11360 /* __memmove_chk special case. */
11361 if (fcode == BUILT_IN_MEMMOVE_CHK)
11362 {
11363 unsigned int src_align = get_pointer_alignment (src);
11364
11365 if (src_align == 0)
11366 return NULL_RTX;
11367
11368 /* If src is categorized for a readonly section we can use
11369 normal __memcpy_chk. */
11370 if (readonly_data_expr (src))
11371 {
11372 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11373 if (!fn)
11374 return NULL_RTX;
11375 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11376 dest, src, len, size);
11377 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11378 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11379 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11380 }
11381 }
11382 return NULL_RTX;
11383 }
11384 }
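
/* For illustration, with a constant object size OS,

     __builtin___memcpy_chk (d, s, n, OS)

   expands as a plain memcpy (d, s, n) when the constant length n fits
   in OS (or when OS is (size_t) -1, i.e. unknown), and is left as a
   call to the checked routine, after the warning above, when n is a
   constant larger than OS.  A sketch; the governing conditions are
   exactly the ones tested in the code.  */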
11385
11386 /* Emit warning if a buffer overflow is detected at compile time. */
11387
11388 static void
11389 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11390 {
11391 int is_strlen = 0;
11392 tree len, size;
11393 location_t loc = tree_nonartificial_location (exp);
11394
11395 switch (fcode)
11396 {
11397 case BUILT_IN_STRCPY_CHK:
11398 case BUILT_IN_STPCPY_CHK:
11399 /* For __strcat_chk the warning will be emitted only if overflowing
11400 by at least strlen (dest) + 1 bytes. */
11401 case BUILT_IN_STRCAT_CHK:
11402 len = CALL_EXPR_ARG (exp, 1);
11403 size = CALL_EXPR_ARG (exp, 2);
11404 is_strlen = 1;
11405 break;
11406 case BUILT_IN_STRNCAT_CHK:
11407 case BUILT_IN_STRNCPY_CHK:
11408 case BUILT_IN_STPNCPY_CHK:
11409 len = CALL_EXPR_ARG (exp, 2);
11410 size = CALL_EXPR_ARG (exp, 3);
11411 break;
11412 case BUILT_IN_SNPRINTF_CHK:
11413 case BUILT_IN_VSNPRINTF_CHK:
11414 len = CALL_EXPR_ARG (exp, 1);
11415 size = CALL_EXPR_ARG (exp, 3);
11416 break;
11417 default:
11418 gcc_unreachable ();
11419 }
11420
11421 if (!len || !size)
11422 return;
11423
11424 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11425 return;
11426
11427 if (is_strlen)
11428 {
11429 len = c_strlen (len, 1);
11430 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11431 return;
11432 }
11433 else if (fcode == BUILT_IN_STRNCAT_CHK)
11434 {
11435 tree src = CALL_EXPR_ARG (exp, 1);
11436 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11437 return;
11438 src = c_strlen (src, 1);
11439 if (! src || ! tree_fits_uhwi_p (src))
11440 {
11441 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11442 exp, get_callee_fndecl (exp));
11443 return;
11444 }
11445 else if (tree_int_cst_lt (src, size))
11446 return;
11447 }
11448 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11449 return;
11450
11451 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11452 exp, get_callee_fndecl (exp));
11453 }
11454
11455 /* Emit warning if a buffer overflow is detected at compile time
11456 in __sprintf_chk/__vsprintf_chk calls. */
11457
11458 static void
11459 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11460 {
11461 tree size, len, fmt;
11462 const char *fmt_str;
11463 int nargs = call_expr_nargs (exp);
11464
11465 /* Verify the required arguments in the original call. */
11466
11467 if (nargs < 4)
11468 return;
11469 size = CALL_EXPR_ARG (exp, 2);
11470 fmt = CALL_EXPR_ARG (exp, 3);
11471
11472 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11473 return;
11474
11475 /* Check whether the format is a literal string constant. */
11476 fmt_str = c_getstr (fmt);
11477 if (fmt_str == NULL)
11478 return;
11479
11480 if (!init_target_chars ())
11481 return;
11482
11483 /* If the format doesn't contain % args or %%, we know its size. */
11484   if (strchr (fmt_str, target_percent) == NULL)
11485 len = build_int_cstu (size_type_node, strlen (fmt_str));
11486 /* If the format is "%s" and first ... argument is a string literal,
11487 we know it too. */
11488 else if (fcode == BUILT_IN_SPRINTF_CHK
11489 && strcmp (fmt_str, target_percent_s) == 0)
11490 {
11491 tree arg;
11492
11493 if (nargs < 5)
11494 return;
11495 arg = CALL_EXPR_ARG (exp, 4);
11496 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11497 return;
11498
11499 len = c_strlen (arg, 1);
11500 if (!len || ! tree_fits_uhwi_p (len))
11501 return;
11502 }
11503 else
11504 return;
11505
11506 if (! tree_int_cst_lt (len, size))
11507 warning_at (tree_nonartificial_location (exp),
11508 0, "%Kcall to %D will always overflow destination buffer",
11509 exp, get_callee_fndecl (exp));
11510 }
11511
11512 /* Emit warning if a free is called with address of a variable. */
11513
11514 static void
11515 maybe_emit_free_warning (tree exp)
11516 {
11517 tree arg = CALL_EXPR_ARG (exp, 0);
11518
11519 STRIP_NOPS (arg);
11520 if (TREE_CODE (arg) != ADDR_EXPR)
11521 return;
11522
11523 arg = get_base_address (TREE_OPERAND (arg, 0));
11524 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11525 return;
11526
11527 if (SSA_VAR_P (arg))
11528 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11529 "%Kattempt to free a non-heap object %qD", exp, arg);
11530 else
11531 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11532 "%Kattempt to free a non-heap object", exp);
11533 }
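
/* For example (illustrative):

     int x;
     free (&x);

   draws "attempt to free a non-heap object 'x'".  */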
11534
11535 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11536 if possible. */
11537
11538 static tree
11539 fold_builtin_object_size (tree ptr, tree ost)
11540 {
11541 unsigned HOST_WIDE_INT bytes;
11542 int object_size_type;
11543
11544 if (!validate_arg (ptr, POINTER_TYPE)
11545 || !validate_arg (ost, INTEGER_TYPE))
11546 return NULL_TREE;
11547
11548 STRIP_NOPS (ost);
11549
11550 if (TREE_CODE (ost) != INTEGER_CST
11551 || tree_int_cst_sgn (ost) < 0
11552 || compare_tree_int (ost, 3) > 0)
11553 return NULL_TREE;
11554
11555 object_size_type = tree_to_shwi (ost);
11556
11557 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11558 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11559 and (size_t) 0 for types 2 and 3. */
11560 if (TREE_SIDE_EFFECTS (ptr))
11561 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11562
11563 if (TREE_CODE (ptr) == ADDR_EXPR)
11564 {
11565 bytes = compute_builtin_object_size (ptr, object_size_type);
11566 if (wi::fits_to_tree_p (bytes, size_type_node))
11567 return build_int_cstu (size_type_node, bytes);
11568 }
11569 else if (TREE_CODE (ptr) == SSA_NAME)
11570 {
11571 /* If object size is not known yet, delay folding until
11572 later. Maybe subsequent passes will help determining
11573 it. */
11574 bytes = compute_builtin_object_size (ptr, object_size_type);
11575 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11576 && wi::fits_to_tree_p (bytes, size_type_node))
11577 return build_int_cstu (size_type_node, bytes);
11578 }
11579
11580 return NULL_TREE;
11581 }
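
/* For illustration, given

     char buf[10];

   __builtin_object_size (&buf, 0) folds to 10 here, whereas for an
   SSA_NAME pointer whose target is still unknown the call is kept in
   the hope that a later pass can determine the size.  */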
11582
11583 /* Builtins with folding operations that operate on "..." arguments
11584 need special handling; we need to store the arguments in a convenient
11585 data structure before attempting any folding. Fortunately there are
11586 only a few builtins that fall into this category. FNDECL is the
11587 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11588 result of the function call is ignored. */
11589
11590 static tree
11591 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11592 bool ignore ATTRIBUTE_UNUSED)
11593 {
11594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11595 tree ret = NULL_TREE;
11596
11597 switch (fcode)
11598 {
11599 case BUILT_IN_FPCLASSIFY:
11600 ret = fold_builtin_fpclassify (loc, exp);
11601 break;
11602
11603 default:
11604 break;
11605 }
11606 if (ret)
11607 {
11608 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11609 SET_EXPR_LOCATION (ret, loc);
11610 TREE_NO_WARNING (ret) = 1;
11611 return ret;
11612 }
11613 return NULL_TREE;
11614 }
11615
11616 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11617 FMT and ARG are the arguments to the call; we don't fold cases with
11618 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11619
11620 Return NULL_TREE if no simplification was possible, otherwise return the
11621 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11622 code of the function to be simplified. */
11623
11624 static tree
11625 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11626 tree arg, bool ignore,
11627 enum built_in_function fcode)
11628 {
11629 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11630 const char *fmt_str = NULL;
11631
11632 /* If the return value is used, don't do the transformation. */
11633 if (! ignore)
11634 return NULL_TREE;
11635
11636 /* Verify the required arguments in the original call. */
11637 if (!validate_arg (fmt, POINTER_TYPE))
11638 return NULL_TREE;
11639
11640 /* Check whether the format is a literal string constant. */
11641 fmt_str = c_getstr (fmt);
11642 if (fmt_str == NULL)
11643 return NULL_TREE;
11644
11645 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11646 {
11647 /* If we're using an unlocked function, assume the other
11648 unlocked functions exist explicitly. */
11649 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11650 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11651 }
11652 else
11653 {
11654 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11655 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11656 }
11657
11658 if (!init_target_chars ())
11659 return NULL_TREE;
11660
11661 if (strcmp (fmt_str, target_percent_s) == 0
11662 || strchr (fmt_str, target_percent) == NULL)
11663 {
11664 const char *str;
11665
11666 if (strcmp (fmt_str, target_percent_s) == 0)
11667 {
11668 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11669 return NULL_TREE;
11670
11671 if (!arg || !validate_arg (arg, POINTER_TYPE))
11672 return NULL_TREE;
11673
11674 str = c_getstr (arg);
11675 if (str == NULL)
11676 return NULL_TREE;
11677 }
11678 else
11679 {
11680 /* The format specifier doesn't contain any '%' characters. */
11681 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11682 && arg)
11683 return NULL_TREE;
11684 str = fmt_str;
11685 }
11686
11687 /* If the string was "", printf does nothing. */
11688 if (str[0] == '\0')
11689 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11690
11691 /* If the string has length of 1, call putchar. */
11692 if (str[1] == '\0')
11693 {
11694 	  /* Given printf ("c") (where c is any one character),
11695 	     convert "c"[0] to an int and pass that to the replacement
11696 	     function.  */
11697 newarg = build_int_cst (integer_type_node, str[0]);
11698 if (fn_putchar)
11699 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11700 }
11701 else
11702 {
11703 /* If the string was "string\n", call puts("string"). */
11704 size_t len = strlen (str);
11705 if ((unsigned char)str[len - 1] == target_newline
11706 && (size_t) (int) len == len
11707 && (int) len > 0)
11708 {
11709 char *newstr;
11710 tree offset_node, string_cst;
11711
11712 /* Create a NUL-terminated string that's one char shorter
11713 than the original, stripping off the trailing '\n'. */
11714 newarg = build_string_literal (len, str);
11715 string_cst = string_constant (newarg, &offset_node);
11716 gcc_checking_assert (string_cst
11717 && (TREE_STRING_LENGTH (string_cst)
11718 == (int) len)
11719 && integer_zerop (offset_node)
11720 && (unsigned char)
11721 TREE_STRING_POINTER (string_cst)[len - 1]
11722 == target_newline);
11723 /* build_string_literal creates a new STRING_CST,
11724 modify it in place to avoid double copying. */
11725 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11726 newstr[len - 1] = '\0';
11727 if (fn_puts)
11728 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11729 }
11730 else
11731 /* We'd like to arrange to call fputs(string,stdout) here,
11732 but we need stdout and don't have a way to get it yet. */
11733 return NULL_TREE;
11734 }
11735 }
11736
11737 /* The other optimizations can be done only on the non-va_list variants. */
11738 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11739 return NULL_TREE;
11740
11741 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11742 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11743 {
11744 if (!arg || !validate_arg (arg, POINTER_TYPE))
11745 return NULL_TREE;
11746 if (fn_puts)
11747 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11748 }
11749
11750 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11751 else if (strcmp (fmt_str, target_percent_c) == 0)
11752 {
11753 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11754 return NULL_TREE;
11755 if (fn_putchar)
11756 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11757 }
11758
11759 if (!call)
11760 return NULL_TREE;
11761
11762 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11763 }
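
/* In source terms, when the result is unused the folds above give,
   for example:

     printf ("")         ->  0 (the call disappears)
     printf ("x")        ->  putchar ('x')
     printf ("abc\n")    ->  puts ("abc")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)  */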
11764
11765 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11766 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11767 more than 3 arguments, and ARG may be null in the 2-argument case.
11768
11769 Return NULL_TREE if no simplification was possible, otherwise return the
11770 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11771 code of the function to be simplified. */
11772
11773 static tree
11774 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11775 tree fmt, tree arg, bool ignore,
11776 enum built_in_function fcode)
11777 {
11778 tree fn_fputc, fn_fputs, call = NULL_TREE;
11779 const char *fmt_str = NULL;
11780
11781 /* If the return value is used, don't do the transformation. */
11782 if (! ignore)
11783 return NULL_TREE;
11784
11785 /* Verify the required arguments in the original call. */
11786 if (!validate_arg (fp, POINTER_TYPE))
11787 return NULL_TREE;
11788 if (!validate_arg (fmt, POINTER_TYPE))
11789 return NULL_TREE;
11790
11791 /* Check whether the format is a literal string constant. */
11792 fmt_str = c_getstr (fmt);
11793 if (fmt_str == NULL)
11794 return NULL_TREE;
11795
11796 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11797 {
11798 /* If we're using an unlocked function, assume the other
11799 unlocked functions exist explicitly. */
11800 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11801 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11802 }
11803 else
11804 {
11805 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11806 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11807 }
11808
11809 if (!init_target_chars ())
11810 return NULL_TREE;
11811
11812 /* If the format doesn't contain % args or %%, use strcpy. */
11813 if (strchr (fmt_str, target_percent) == NULL)
11814 {
11815 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11816 && arg)
11817 return NULL_TREE;
11818
11819 /* If the format specifier was "", fprintf does nothing. */
11820 if (fmt_str[0] == '\0')
11821 {
11822 /* If FP has side-effects, just wait until gimplification is
11823 done. */
11824 if (TREE_SIDE_EFFECTS (fp))
11825 return NULL_TREE;
11826
11827 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11828 }
11829
11830 /* When "string" doesn't contain %, replace all cases of
11831 fprintf (fp, string) with fputs (string, fp). The fputs
11832 builtin will take care of special cases like length == 1. */
11833 if (fn_fputs)
11834 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11835 }
11836
11837 /* The other optimizations can be done only on the non-va_list variants. */
11838 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11839 return NULL_TREE;
11840
11841 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11842 else if (strcmp (fmt_str, target_percent_s) == 0)
11843 {
11844 if (!arg || !validate_arg (arg, POINTER_TYPE))
11845 return NULL_TREE;
11846 if (fn_fputs)
11847 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11848 }
11849
11850 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11851 else if (strcmp (fmt_str, target_percent_c) == 0)
11852 {
11853 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11854 return NULL_TREE;
11855 if (fn_fputc)
11856 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11857 }
11858
11859 if (!call)
11860 return NULL_TREE;
11861 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11862 }
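
/* In source terms, when the result is unused:

     fprintf (fp, "")       ->  0 (the call disappears)
     fprintf (fp, "abc")    ->  fputs ("abc", fp)
     fprintf (fp, "%s", s)  ->  fputs (s, fp)
     fprintf (fp, "%c", c)  ->  fputc (c, fp)  */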
11863
11864 /* Initialize format string characters in the target charset. */
11865
11866 bool
11867 init_target_chars (void)
11868 {
11869 static bool init;
11870 if (!init)
11871 {
11872 target_newline = lang_hooks.to_target_charset ('\n');
11873 target_percent = lang_hooks.to_target_charset ('%');
11874 target_c = lang_hooks.to_target_charset ('c');
11875 target_s = lang_hooks.to_target_charset ('s');
11876 if (target_newline == 0 || target_percent == 0 || target_c == 0
11877 || target_s == 0)
11878 return false;
11879
11880 target_percent_c[0] = target_percent;
11881 target_percent_c[1] = target_c;
11882 target_percent_c[2] = '\0';
11883
11884 target_percent_s[0] = target_percent;
11885 target_percent_s[1] = target_s;
11886 target_percent_s[2] = '\0';
11887
11888 target_percent_s_newline[0] = target_percent;
11889 target_percent_s_newline[1] = target_s;
11890 target_percent_s_newline[2] = target_newline;
11891 target_percent_s_newline[3] = '\0';
11892
11893 init = true;
11894 }
11895 return true;
11896 }
11897
11898 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11899 and no overflow/underflow occurred. INEXACT is true if M was not
11900 exactly calculated. TYPE is the tree type for the result. This
11901    function assumes that you cleared the MPFR flags and then
11902    calculated M, so that any flag set prior to entering this function
11903    was set by that calculation.  Return NULL_TREE if any checks fail.  */
11904
11905 static tree
11906 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11907 {
11908 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11909 overflow/underflow occurred. If -frounding-math, proceed iff the
11910 result of calling FUNC was exact. */
11911 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11912 && (!flag_rounding_math || !inexact))
11913 {
11914 REAL_VALUE_TYPE rr;
11915
11916 real_from_mpfr (&rr, m, type, GMP_RNDN);
11917 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11918 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11919 	 but the mpfr_t is not, then we underflowed in the
11920 conversion. */
11921 if (real_isfinite (&rr)
11922 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11923 {
11924 REAL_VALUE_TYPE rmode;
11925
11926 real_convert (&rmode, TYPE_MODE (type), &rr);
11927 /* Proceed iff the specified mode can hold the value. */
11928 if (real_identical (&rmode, &rr))
11929 return build_real (type, rmode);
11930 }
11931 }
11932 return NULL_TREE;
11933 }
11934
11935 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11936 number and no overflow/underflow occurred. INEXACT is true if M
11937 was not exactly calculated. TYPE is the tree type for the result.
11938    This function assumes that you cleared the MPFR flags and then
11939    calculated M, so that any flag set prior to entering this function
11940    was set by that calculation.  Return NULL_TREE if any checks fail;
11941    if FORCE_CONVERT is true, bypass the checks.  */
11942
11943 static tree
11944 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11945 {
11946 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11947 overflow/underflow occurred. If -frounding-math, proceed iff the
11948 result of calling FUNC was exact. */
11949 if (force_convert
11950 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11951 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11952 && (!flag_rounding_math || !inexact)))
11953 {
11954 REAL_VALUE_TYPE re, im;
11955
11956 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11957 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11958 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11959 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11960 	 but the mpfr_t is not, then we underflowed in the
11961 conversion. */
11962 if (force_convert
11963 || (real_isfinite (&re) && real_isfinite (&im)
11964 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11965 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11966 {
11967 REAL_VALUE_TYPE re_mode, im_mode;
11968
11969 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11970 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11971 /* Proceed iff the specified mode can hold the value. */
11972 if (force_convert
11973 || (real_identical (&re_mode, &re)
11974 && real_identical (&im_mode, &im)))
11975 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11976 build_real (TREE_TYPE (type), im_mode));
11977 }
11978 }
11979 return NULL_TREE;
11980 }
11981
11982 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11983 FUNC on it and return the resulting value as a tree with type TYPE.
11984 If MIN and/or MAX are not NULL, then the supplied ARG must be
11985 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11986 acceptable values, otherwise they are not. The mpfr precision is
11987 set to the precision of TYPE. We assume that function FUNC returns
11988 zero if the result could be calculated exactly within the requested
11989 precision. */
11990
11991 static tree
11992 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11993 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11994 bool inclusive)
11995 {
11996 tree result = NULL_TREE;
11997
11998 STRIP_NOPS (arg);
11999
12000 /* To proceed, MPFR must exactly represent the target floating point
12001 format, which only happens when the target base equals two. */
12002 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12003 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12004 {
12005 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12006
12007 if (real_isfinite (ra)
12008 	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12009 	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12010 {
12011 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12012 const int prec = fmt->p;
12013 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12014 int inexact;
12015 mpfr_t m;
12016
12017 mpfr_init2 (m, prec);
12018 mpfr_from_real (m, ra, GMP_RNDN);
12019 mpfr_clear_flags ();
12020 inexact = func (m, m, rnd);
12021 result = do_mpfr_ckconv (m, type, inexact);
12022 mpfr_clear (m);
12023 }
12024 }
12025
12026 return result;
12027 }
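
/* For example, a call such as sin (1.0) reaches this helper with
   FUNC == mpfr_sin; the value is computed by MPFR at the precision
   of the result type and is used only if do_mpfr_ckconv is satisfied
   that it survives the round trip to the target format (and, under
   -frounding-math, only if it was computed exactly).  */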
12028
12029 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12030 FUNC on it and return the resulting value as a tree with type TYPE.
12031 The mpfr precision is set to the precision of TYPE. We assume that
12032 function FUNC returns zero if the result could be calculated
12033 exactly within the requested precision. */
12034
12035 static tree
12036 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12037 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12038 {
12039 tree result = NULL_TREE;
12040
12041 STRIP_NOPS (arg1);
12042 STRIP_NOPS (arg2);
12043
12044 /* To proceed, MPFR must exactly represent the target floating point
12045 format, which only happens when the target base equals two. */
12046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12047 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12048 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12049 {
12050 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12051 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12052
12053 if (real_isfinite (ra1) && real_isfinite (ra2))
12054 {
12055 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12056 const int prec = fmt->p;
12057 	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12058 int inexact;
12059 mpfr_t m1, m2;
12060
12061 mpfr_inits2 (prec, m1, m2, NULL);
12062 mpfr_from_real (m1, ra1, GMP_RNDN);
12063 mpfr_from_real (m2, ra2, GMP_RNDN);
12064 mpfr_clear_flags ();
12065 inexact = func (m1, m1, m2, rnd);
12066 result = do_mpfr_ckconv (m1, type, inexact);
12067 mpfr_clears (m1, m2, NULL);
12068 }
12069 }
12070
12071 return result;
12072 }
12073
12074 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12075 FUNC on it and return the resulting value as a tree with type TYPE.
12076 The mpfr precision is set to the precision of TYPE. We assume that
12077 function FUNC returns zero if the result could be calculated
12078 exactly within the requested precision. */
12079
12080 static tree
12081 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12082 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12083 {
12084 tree result = NULL_TREE;
12085
12086 STRIP_NOPS (arg1);
12087 STRIP_NOPS (arg2);
12088 STRIP_NOPS (arg3);
12089
12090 /* To proceed, MPFR must exactly represent the target floating point
12091 format, which only happens when the target base equals two. */
12092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12093 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12094 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12095 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12096 {
12097 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12098 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12099 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12100
12101 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12102 {
12103 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12104 const int prec = fmt->p;
12105 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12106 int inexact;
12107 mpfr_t m1, m2, m3;
12108
12109 mpfr_inits2 (prec, m1, m2, m3, NULL);
12110 mpfr_from_real (m1, ra1, GMP_RNDN);
12111 mpfr_from_real (m2, ra2, GMP_RNDN);
12112 mpfr_from_real (m3, ra3, GMP_RNDN);
12113 mpfr_clear_flags ();
12114 inexact = func (m1, m1, m2, m3, rnd);
12115 result = do_mpfr_ckconv (m1, type, inexact);
12116 mpfr_clears (m1, m2, m3, NULL);
12117 }
12118 }
12119
12120 return result;
12121 }
12122
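/* Illustrative usage (a sketch): fma is the natural client here,
   since mpfr_fma has the required four-operand shape:

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   e.g. folding fma (2.0, 3.0, 1.0) to the REAL_CST 7.0.  */
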
12123 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12124 the values pointed to by ARG_SINP and ARG_COSP to the results.
12125 If ARG_SINP and ARG_COSP are NULL then the result is returned
12126 as a complex value.
12127 The type is taken from the type of ARG and is used for setting the
12128 precision of the calculation and results. */
12129
12130 static tree
12131 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12132 {
12133 tree const type = TREE_TYPE (arg);
12134 tree result = NULL_TREE;
12135
12136 STRIP_NOPS (arg);
12137
12138 /* To proceed, MPFR must exactly represent the target floating point
12139 format, which only happens when the target base equals two. */
12140 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12141 && TREE_CODE (arg) == REAL_CST
12142 && !TREE_OVERFLOW (arg))
12143 {
12144 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12145
12146 if (real_isfinite (ra))
12147 {
12148 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12149 const int prec = fmt->p;
12150 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12151 tree result_s, result_c;
12152 int inexact;
12153 mpfr_t m, ms, mc;
12154
12155 mpfr_inits2 (prec, m, ms, mc, NULL);
12156 mpfr_from_real (m, ra, GMP_RNDN);
12157 mpfr_clear_flags ();
12158 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12159 result_s = do_mpfr_ckconv (ms, type, inexact);
12160 result_c = do_mpfr_ckconv (mc, type, inexact);
12161 mpfr_clears (m, ms, mc, NULL);
12162 if (result_s && result_c)
12163 {
12164 /* If we are to return the result as a complex value, do so. */
12165 if (!arg_sinp && !arg_cosp)
12166 return build_complex (build_complex_type (type),
12167 result_c, result_s);
12168
12169 /* Dereference the sin/cos pointer arguments. */
12170 arg_sinp = build_fold_indirect_ref (arg_sinp);
12171 arg_cosp = build_fold_indirect_ref (arg_cosp);
12172 /* Proceed iff valid pointer types were passed in. */
12173 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12174 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12175 {
12176 /* Set the values. */
12177 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12178 result_s);
12179 TREE_SIDE_EFFECTS (result_s) = 1;
12180 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12181 result_c);
12182 TREE_SIDE_EFFECTS (result_c) = 1;
12183 /* Combine the assignments into a compound expr. */
12184 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12185 result_s, result_c));
12186 }
12187 }
12188 }
12189 }
12190 return result;
12191 }
12192
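/* Illustrative result shapes (a sketch): for sincos (x, &s, &c) the
   folded tree is the compound expression of the two stores,

     (*arg_sinp = SIN_VAL, *arg_cosp = COS_VAL)

   while a cexpi-style caller passing NULL pointers gets back the
   complex constant COS_VAL + SIN_VAL*i.  */
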
12193 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12194 two-argument mpfr order N Bessel function FUNC on them and return
12195 the resulting value as a tree with type TYPE. The mpfr precision
12196 is set to the precision of TYPE. We assume that function FUNC
12197 returns zero if the result could be calculated exactly within the
12198 requested precision. */
12199 static tree
12200 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12201 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12202 const REAL_VALUE_TYPE *min, bool inclusive)
12203 {
12204 tree result = NULL_TREE;
12205
12206 STRIP_NOPS (arg1);
12207 STRIP_NOPS (arg2);
12208
12209 /* To proceed, MPFR must exactly represent the target floating point
12210 format, which only happens when the target base equals two. */
12211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12212 && tree_fits_shwi_p (arg1)
12213 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12214 {
12215 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12216 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12217
12218 if (n == (long)n
12219 && real_isfinite (ra)
12220 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12221 {
12222 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12223 const int prec = fmt->p;
12224 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12225 int inexact;
12226 mpfr_t m;
12227
12228 mpfr_init2 (m, prec);
12229 mpfr_from_real (m, ra, GMP_RNDN);
12230 mpfr_clear_flags ();
12231 inexact = func (m, n, m, rnd);
12232 result = do_mpfr_ckconv (m, type, inexact);
12233 mpfr_clear (m);
12234 }
12235 }
12236
12237 return result;
12238 }
12239
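/* Illustrative usage (a sketch): jn is defined for all finite x,
   while yn requires x > 0, so callers might pass

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   using MIN/INCLUSIVE to enforce the yn domain restriction.  */
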
12240 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12241 the pointer *(ARG_QUO) and return the result. The type is taken
12242 from the type of ARG0 and is used for setting the precision of the
12243 calculation and results. */
12244
12245 static tree
12246 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12247 {
12248 tree const type = TREE_TYPE (arg0);
12249 tree result = NULL_TREE;
12250
12251 STRIP_NOPS (arg0);
12252 STRIP_NOPS (arg1);
12253
12254 /* To proceed, MPFR must exactly represent the target floating point
12255 format, which only happens when the target base equals two. */
12256 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12257 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12258 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12259 {
12260 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12261 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12262
12263 if (real_isfinite (ra0) && real_isfinite (ra1))
12264 {
12265 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12266 const int prec = fmt->p;
12267 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12268 tree result_rem;
12269 long integer_quo;
12270 mpfr_t m0, m1;
12271
12272 mpfr_inits2 (prec, m0, m1, NULL);
12273 mpfr_from_real (m0, ra0, GMP_RNDN);
12274 mpfr_from_real (m1, ra1, GMP_RNDN);
12275 mpfr_clear_flags ();
12276 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12277 /* Remquo is independent of the rounding mode, so pass
12278 inexact=0 to do_mpfr_ckconv(). */
12279 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12280 mpfr_clears (m0, m1, NULL);
12281 if (result_rem)
12282 {
12283 /* MPFR calculates quo in the host's long so it may
12284 return more bits in quo than the target int can hold
12285 if sizeof(host long) > sizeof(target int). This can
12286 happen even for native compilers in LP64 mode. In
12287 these cases, reduce the quo value modulo the largest
12288 number that the target int can hold while leaving one
12289 bit for the sign. */
12290 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12291 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
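/* Illustrative: with a 64-bit host long and a 32-bit target int,
   a quo of 0x100000005 is reduced modulo 0x80000000, leaving 5.  */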
12292
12293 /* Dereference the quo pointer argument. */
12294 arg_quo = build_fold_indirect_ref (arg_quo);
12295 /* Proceed iff a valid pointer type was passed in. */
12296 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12297 {
12298 /* Set the value. */
12299 tree result_quo
12300 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12301 build_int_cst (TREE_TYPE (arg_quo),
12302 integer_quo));
12303 TREE_SIDE_EFFECTS (result_quo) = 1;
12304 /* Combine the quo assignment with the rem. */
12305 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12306 result_quo, result_rem));
12307 }
12308 }
12309 }
12310 }
12311 return result;
12312 }
12313
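/* Worked example (illustrative): folding remquo (5.0, 3.0, &q)
   produces the compound expression (*arg_quo = 2, -1.0e+0): the
   quotient 5/3 rounds to nearest as 2, and 5.0 - 2*3.0 == -1.0.  */
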
12314 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12315 resulting value as a tree with type TYPE. The mpfr precision is
12316 set to the precision of TYPE. We assume that this mpfr function
12317 returns zero if the result could be calculated exactly within the
12318 requested precision. In addition, the integer pointer represented
12319 by ARG_SG will be dereferenced and set to the appropriate signgam
12320 (-1,1) value. */
12321
12322 static tree
12323 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12324 {
12325 tree result = NULL_TREE;
12326
12327 STRIP_NOPS (arg);
12328
12329 /* To proceed, MPFR must exactly represent the target floating point
12330 format, which only happens when the target base equals two. Also
12331 verify ARG is a constant and that ARG_SG is an int pointer. */
12332 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12333 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12334 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12335 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12336 {
12337 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12338
12339 /* In addition to NaN and Inf, the argument cannot be zero or a
12340 negative integer, where lgamma has poles. */
12341 if (real_isfinite (ra)
12342 && ra->cl != rvc_zero
12343 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12344 {
12345 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12346 const int prec = fmt->p;
12347 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12348 int inexact, sg;
12349 mpfr_t m;
12350 tree result_lg;
12351
12352 mpfr_init2 (m, prec);
12353 mpfr_from_real (m, ra, GMP_RNDN);
12354 mpfr_clear_flags ();
12355 inexact = mpfr_lgamma (m, &sg, m, rnd);
12356 result_lg = do_mpfr_ckconv (m, type, inexact);
12357 mpfr_clear (m);
12358 if (result_lg)
12359 {
12360 tree result_sg;
12361
12362 /* Dereference the arg_sg pointer argument. */
12363 arg_sg = build_fold_indirect_ref (arg_sg);
12364 /* Assign the signgam value into *arg_sg. */
12365 result_sg = fold_build2 (MODIFY_EXPR,
12366 TREE_TYPE (arg_sg), arg_sg,
12367 build_int_cst (TREE_TYPE (arg_sg), sg));
12368 TREE_SIDE_EFFECTS (result_sg) = 1;
12369 /* Combine the signgam assignment with the lgamma result. */
12370 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12371 result_sg, result_lg));
12372 }
12373 }
12374 }
12375
12376 return result;
12377 }
12378
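/* Worked example (illustrative): lgamma_r (10.0, &sg) folds to the
   compound expression (*arg_sg = 1, LG), where LG is the REAL_CST
   for log (9!) = log (362880) ~= 12.8018 computed to the precision
   of TYPE, and 1 is the sign of gamma (10.0).  */
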
12379 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12380 function FUNC on it and return the resulting value as a tree with
12381 type TYPE. The mpfr precision is set to the precision of TYPE. We
12382 assume that function FUNC returns zero if the result could be
12383 calculated exactly within the requested precision. */
12384
12385 static tree
12386 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12387 {
12388 tree result = NULL_TREE;
12389
12390 STRIP_NOPS (arg);
12391
12392 /* To proceed, MPFR must exactly represent the target floating point
12393 format, which only happens when the target base equals two. */
12394 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12396 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12397 {
12398 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12399 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12400
12401 if (real_isfinite (re) && real_isfinite (im))
12402 {
12403 const struct real_format *const fmt =
12404 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12405 const int prec = fmt->p;
12406 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12407 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12408 int inexact;
12409 mpc_t m;
12410
12411 mpc_init2 (m, prec);
12412 mpfr_from_real (mpc_realref (m), re, rnd);
12413 mpfr_from_real (mpc_imagref (m), im, rnd);
12414 mpfr_clear_flags ();
12415 inexact = func (m, m, crnd);
12416 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12417 mpc_clear (m);
12418 }
12419 }
12420
12421 return result;
12422 }
12423
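/* Illustrative usage (a sketch): complex one-argument folders can
   pass the MPC entry points directly, e.g.

     do_mpc_arg1 (arg, type, mpc_cos);
     do_mpc_arg1 (arg, type, mpc_exp);

   Any int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) fits.  */
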
12424 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12425 mpc function FUNC on them and return the resulting value as a tree
12426 with type TYPE. The mpfr precision is set to the precision of
12427 TYPE. We assume that function FUNC returns zero if the result
12428 could be calculated exactly within the requested precision. If
12429 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12430 in the arguments and/or results. */
12431
12432 tree
12433 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12434 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12435 {
12436 tree result = NULL_TREE;
12437
12438 STRIP_NOPS (arg0);
12439 STRIP_NOPS (arg1);
12440
12441 /* To proceed, MPFR must exactly represent the target floating point
12442 format, which only happens when the target base equals two. */
12443 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12444 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12445 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12446 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12447 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12448 {
12449 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12450 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12451 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12452 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12453
12454 if (do_nonfinite
12455 || (real_isfinite (re0) && real_isfinite (im0)
12456 && real_isfinite (re1) && real_isfinite (im1)))
12457 {
12458 const struct real_format *const fmt =
12459 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12460 const int prec = fmt->p;
12461 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12462 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12463 int inexact;
12464 mpc_t m0, m1;
12465
12466 mpc_init2 (m0, prec);
12467 mpc_init2 (m1, prec);
12468 mpfr_from_real (mpc_realref (m0), re0, rnd);
12469 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12470 mpfr_from_real (mpc_realref (m1), re1, rnd);
12471 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12472 mpfr_clear_flags ();
12473 inexact = func (m0, m0, m1, crnd);
12474 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12475 mpc_clear (m0);
12476 mpc_clear (m1);
12477 }
12478 }
12479
12480 return result;
12481 }
12482
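/* Illustrative usage (a sketch): a cpow folder would reject
   non-finite inputs, while constant-initializer folding of complex
   multiplication may want them handled anyway:

     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 1, mpc_mul);  */
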
12483 /* A wrapper function for builtin folding that prevents warnings for
12484 "statement without effect" and the like, caused by removing the
12485 call node earlier than the warning is generated. */
12486
12487 tree
12488 fold_call_stmt (gimple stmt, bool ignore)
12489 {
12490 tree ret = NULL_TREE;
12491 tree fndecl = gimple_call_fndecl (stmt);
12492 location_t loc = gimple_location (stmt);
12493 if (fndecl
12494 && TREE_CODE (fndecl) == FUNCTION_DECL
12495 && DECL_BUILT_IN (fndecl)
12496 && !gimple_call_va_arg_pack_p (stmt))
12497 {
12498 int nargs = gimple_call_num_args (stmt);
12499 tree *args = (nargs > 0
12500 ? gimple_call_arg_ptr (stmt, 0)
12501 : &error_mark_node);
12502
12503 if (avoid_folding_inline_builtin (fndecl))
12504 return NULL_TREE;
12505 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12506 {
12507 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12508 }
12509 else
12510 {
12511 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12512 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12513 if (ret)
12514 {
12515 /* Propagate location information from the original call to
12516 the expansion of the builtin. Otherwise things like
12517 maybe_emit_chk_warning, which operate on the expansion
12518 of a builtin, will use the wrong location information. */
12519 if (gimple_has_location (stmt))
12520 {
12521 tree realret = ret;
12522 if (TREE_CODE (ret) == NOP_EXPR)
12523 realret = TREE_OPERAND (ret, 0);
12524 if (CAN_HAVE_LOCATION_P (realret)
12525 && !EXPR_HAS_LOCATION (realret))
12526 SET_EXPR_LOCATION (realret, loc);
12527 return realret;
12528 }
12529 return ret;
12530 }
12531 }
12532 }
12533 return NULL_TREE;
12534 }
12535
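/* Illustrative effect (a sketch): for a gimple statement such as
   x = __builtin_sqrt (4.0), fold_call_stmt returns the REAL_CST 2.0
   with the statement's location attached, so later diagnostics that
   inspect the expansion still point at the original call.  */
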
12536 /* Look up the function in builtin_decl that corresponds to DECL
12537 and set ASMSPEC as its user assembler name. DECL must be a
12538 function decl that declares a builtin. */
12539
12540 void
12541 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12542 {
12543 tree builtin;
12544 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12545 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12546 && asmspec != 0);
12547
12548 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12549 set_user_assembler_name (builtin, asmspec);
12550 switch (DECL_FUNCTION_CODE (decl))
12551 {
12552 case BUILT_IN_MEMCPY:
12553 init_block_move_fn (asmspec);
12554 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12555 break;
12556 case BUILT_IN_MEMSET:
12557 init_block_clear_fn (asmspec);
12558 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12559 break;
12560 case BUILT_IN_MEMMOVE:
12561 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12562 break;
12563 case BUILT_IN_MEMCMP:
12564 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12565 break;
12566 case BUILT_IN_ABORT:
12567 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12568 break;
12569 case BUILT_IN_FFS:
12570 if (INT_TYPE_SIZE < BITS_PER_WORD)
12571 {
12572 set_user_assembler_libfunc ("ffs", asmspec);
12573 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12574 MODE_INT, 0), "ffs");
12575 }
12576 break;
12577 default:
12578 break;
12579 }
12580 }
12581
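/* Illustrative trigger (a sketch): a user-level redeclaration such as

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   reaches this function with ASMSPEC == "my_memcpy", renaming the
   builtin's decl and redirecting the memcpy libfunc used for block
   moves.  */
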
12582 /* Return true if DECL is a builtin that expands to a constant or similarly
12583 simple code. */
12584 bool
12585 is_simple_builtin (tree decl)
12586 {
12587 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12588 switch (DECL_FUNCTION_CODE (decl))
12589 {
12590 /* Builtins that expand to constants. */
12591 case BUILT_IN_CONSTANT_P:
12592 case BUILT_IN_EXPECT:
12593 case BUILT_IN_OBJECT_SIZE:
12594 case BUILT_IN_UNREACHABLE:
12595 /* Simple register moves or loads from stack. */
12596 case BUILT_IN_ASSUME_ALIGNED:
12597 case BUILT_IN_RETURN_ADDRESS:
12598 case BUILT_IN_EXTRACT_RETURN_ADDR:
12599 case BUILT_IN_FROB_RETURN_ADDR:
12600 case BUILT_IN_RETURN:
12601 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12602 case BUILT_IN_FRAME_ADDRESS:
12603 case BUILT_IN_VA_END:
12604 case BUILT_IN_STACK_SAVE:
12605 case BUILT_IN_STACK_RESTORE:
12606 /* Exception state returns or moves registers around. */
12607 case BUILT_IN_EH_FILTER:
12608 case BUILT_IN_EH_POINTER:
12609 case BUILT_IN_EH_COPY_VALUES:
12610 return true;
12611
12612 default:
12613 return false;
12614 }
12615
12616 return false;
12617 }
12618
12619 /* Return true if DECL is a builtin that is not expensive, i.e., it is
12620 most probably expanded inline into reasonably simple code. This is a
12621 superset of is_simple_builtin. */
12622 bool
12623 is_inexpensive_builtin (tree decl)
12624 {
12625 if (!decl)
12626 return false;
12627 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12628 return true;
12629 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12630 switch (DECL_FUNCTION_CODE (decl))
12631 {
12632 case BUILT_IN_ABS:
12633 case BUILT_IN_ALLOCA:
12634 case BUILT_IN_ALLOCA_WITH_ALIGN:
12635 case BUILT_IN_BSWAP16:
12636 case BUILT_IN_BSWAP32:
12637 case BUILT_IN_BSWAP64:
12638 case BUILT_IN_CLZ:
12639 case BUILT_IN_CLZIMAX:
12640 case BUILT_IN_CLZL:
12641 case BUILT_IN_CLZLL:
12642 case BUILT_IN_CTZ:
12643 case BUILT_IN_CTZIMAX:
12644 case BUILT_IN_CTZL:
12645 case BUILT_IN_CTZLL:
12646 case BUILT_IN_FFS:
12647 case BUILT_IN_FFSIMAX:
12648 case BUILT_IN_FFSL:
12649 case BUILT_IN_FFSLL:
12650 case BUILT_IN_IMAXABS:
12651 case BUILT_IN_FINITE:
12652 case BUILT_IN_FINITEF:
12653 case BUILT_IN_FINITEL:
12654 case BUILT_IN_FINITED32:
12655 case BUILT_IN_FINITED64:
12656 case BUILT_IN_FINITED128:
12657 case BUILT_IN_FPCLASSIFY:
12658 case BUILT_IN_ISFINITE:
12659 case BUILT_IN_ISINF_SIGN:
12660 case BUILT_IN_ISINF:
12661 case BUILT_IN_ISINFF:
12662 case BUILT_IN_ISINFL:
12663 case BUILT_IN_ISINFD32:
12664 case BUILT_IN_ISINFD64:
12665 case BUILT_IN_ISINFD128:
12666 case BUILT_IN_ISNAN:
12667 case BUILT_IN_ISNANF:
12668 case BUILT_IN_ISNANL:
12669 case BUILT_IN_ISNAND32:
12670 case BUILT_IN_ISNAND64:
12671 case BUILT_IN_ISNAND128:
12672 case BUILT_IN_ISNORMAL:
12673 case BUILT_IN_ISGREATER:
12674 case BUILT_IN_ISGREATEREQUAL:
12675 case BUILT_IN_ISLESS:
12676 case BUILT_IN_ISLESSEQUAL:
12677 case BUILT_IN_ISLESSGREATER:
12678 case BUILT_IN_ISUNORDERED:
12679 case BUILT_IN_VA_ARG_PACK:
12680 case BUILT_IN_VA_ARG_PACK_LEN:
12681 case BUILT_IN_VA_COPY:
12682 case BUILT_IN_TRAP:
12683 case BUILT_IN_SAVEREGS:
12684 case BUILT_IN_POPCOUNTL:
12685 case BUILT_IN_POPCOUNTLL:
12686 case BUILT_IN_POPCOUNTIMAX:
12687 case BUILT_IN_POPCOUNT:
12688 case BUILT_IN_PARITYL:
12689 case BUILT_IN_PARITYLL:
12690 case BUILT_IN_PARITYIMAX:
12691 case BUILT_IN_PARITY:
12692 case BUILT_IN_LABS:
12693 case BUILT_IN_LLABS:
12694 case BUILT_IN_PREFETCH:
12695 return true;
12696
12697 default:
12698 return is_simple_builtin (decl);
12699 }
12700
12701 return false;
12702 }