re PR middle-end/62090 (ice in compute_may_aliases)
[gcc.git] / gcc / builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
				       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or (when -fcilkplus is enabled) is one of the recognized Cilk
   runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
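
/* Illustrative examples (not part of GCC itself) of how the checks
   above behave:

     is_builtin_name ("__builtin_memcpy")     -> true
     is_builtin_name ("__sync_lock_release")  -> true
     is_builtin_name ("__atomic_load_n")      -> true
     is_builtin_name ("memcpy")               -> false

   "__cilkrts_detach" and "__cilkrts_pop_frame" match only when
   flag_cilkplus is set, i.e. under -fcilkplus.  */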

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
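
/* Illustrative examples (not part of GCC): for the STRING_CST "hello"
   (TREE_STRING_LENGTH == 6, counting the trailing nul):

     c_strlen ("hello", 1)        -> ssize_int (5)
     c_strlen ("hello" + 2, 1)    -> ssize_int (3)      (known offset 2)
     c_strlen ("hello" + i, 1)    -> ssizetype 5 - i    (no internal nul)
     c_strlen ("hel\0lo" + i, 1)  -> NULL_TREE  (internal nul, unknown i)
     c_strlen ("hello" + 9, 1)    -> NULL_TREE, after warning about an
				     out-of-bounds offset.  */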

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
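
/* Illustrative example (not part of GCC): reading "abcd" in a 32-bit
   integer mode.  Once a nul byte is seen, CH stays zero, so shorter
   strings are zero-padded:

     c_readstr ("abcd", SImode), little-endian target
       -> 0x64636261  ('a' == 0x61 lands in the lowest-addressed byte)
     c_readstr ("abcd", SImode), big-endian target
       -> 0x61626364
     c_readstr ("ab", SImode), little-endian target
       -> 0x00006261  */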

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
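
/* Illustrative example (not part of GCC): with CHAR_TYPE_SIZE == 8 and
   8-bit host chars, target_char_cast on INTEGER_CST 0x41 stores 'A' in
   *P and returns 0.  A wider constant such as 0x141 is first truncated
   to CHAR_TYPE_SIZE bits (also giving 0x41).  The failure return of 1
   arises when the target char is wider than the host char and the
   truncated target value still does not fit in a host char.  */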

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
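
/* Source-level sketch (illustrative, not part of this file) of the
   builtins this routine expands.  A count of 0 is well defined;
   nonzero counts walk the dynamic chain and are only as reliable as
   the target's frame layout.  Assumes <stdio.h> for printf.

     void
     who_called_me (void)
     {
       void *ra = __builtin_return_address (0);  // caller's resume point
       void *fp = __builtin_frame_address (0);   // this frame
       printf ("called from %p (frame %p)\n", ra, fp);
     }
*/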

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
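
/* Resulting layout of the setjmp buffer (derived from the stores
   above; "word" means GET_MODE_SIZE (Pmode) bytes):

     word 0:   frame value (targetm.builtin_setjmp_frame_value ())
     word 1:   address of RECEIVER_LABEL
     word 2+:  machine-dependent stack save area (SAVE_NONLOCAL mode)

   __builtin_setjmp is documented to take a five-word buffer, which
   leaves three words for the stack save area.  */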

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
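
/* Source-level sketch (illustrative only; as noted above, these
   builtins are intended for internal exception-handling use rather
   than user code).  The second argument to __builtin_longjmp must be
   1; assumes <stdint.h> for intptr_t.

     static intptr_t buf[5];

     void
     raise_it (void)
     {
       __builtin_longjmp (buf, 1);    // resumes at the setjmp below
     }

     int
     try_it (void)
     {
       if (__builtin_setjmp (buf))
	 return 1;		      // reached via longjmp
       raise_it ();
       return 0;		      // not reached
     }
*/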

/* Return true if ITER has not yet reached the end of the argument
   list it iterates over.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
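
/* Illustrative examples (not part of GCC): for a call EXP to
   memcpy (dst, src, n),

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)

   returns true; the trailing VOID_TYPE makes it reject a call with
   extra arguments.  A trailing 0 instead accepts any further
   arguments, as used for variadic builtins such as prefetch below:

     validate_arglist (exp, POINTER_TYPE, 0)  */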

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
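
/* Source-level sketch (illustrative, not part of this file): the
   read/write and locality arguments must be literal constants, per the
   checks above.  consume() is a hypothetical stand-in for real work.

     void
     walk (const int *a, int n)
     {
       for (int i = 0; i < n; i++)
	 {
	   __builtin_prefetch (&a[i + 16], 0, 3);  // read, high locality
	   consume (a[i]);
	 }
     }
*/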

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
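
/* Source-level sketch (illustrative, not part of this file) of the
   untyped call machinery expanded here and below: forward all of one's
   own arguments to another function and return whatever it returned.
   target() is an assumed callee with the same signature as the
   wrapper, and 64 is an assumed worst-case byte count for the pushed
   arguments.

     double target (int, double);

     double
     wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target, args, 64);
       __builtin_return (res);	  // does not fall through
     }
*/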
1567
1568 /* Perform an untyped call and save the state required to perform an
1569 untyped return of whatever value was returned by the given function. */
1570
1571 static rtx
1572 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1573 {
1574 int size, align, regno;
1575 enum machine_mode mode;
1576 rtx incoming_args, result, reg, dest, src, call_insn;
1577 rtx old_stack_level = 0;
1578 rtx call_fusage = 0;
1579 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1580
1581 arguments = convert_memory_address (Pmode, arguments);
1582
1583 /* Create a block where the return registers can be saved. */
1584 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1585
1586 /* Fetch the arg pointer from the ARGUMENTS block. */
1587 incoming_args = gen_reg_rtx (Pmode);
1588 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1589 #ifndef STACK_GROWS_DOWNWARD
1590 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1591 incoming_args, 0, OPTAB_LIB_WIDEN);
1592 #endif
1593
1594 /* Push a new argument block and copy the arguments. Do not allow
1595 the (potential) memcpy call below to interfere with our stack
1596 manipulations. */
1597 do_pending_stack_adjust ();
1598 NO_DEFER_POP;
1599
1600 /* Save the stack with nonlocal if available. */
1601 #ifdef HAVE_save_stack_nonlocal
1602 if (HAVE_save_stack_nonlocal)
1603 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1604 else
1605 #endif
1606 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1607
1608 /* Allocate a block of memory onto the stack and copy the memory
1609 arguments to the outgoing arguments address. We can pass TRUE
1610 as the 4th argument because we just saved the stack pointer
1611 and will restore it right after the call. */
1612 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1613
1614 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1615 may have already set current_function_calls_alloca to true.
1616 current_function_calls_alloca won't be set if argsize is zero,
1617 so we have to guarantee need_drap is true here. */
1618 if (SUPPORTS_STACK_ALIGNMENT)
1619 crtl->need_drap = true;
1620
1621 dest = virtual_outgoing_args_rtx;
1622 #ifndef STACK_GROWS_DOWNWARD
1623 if (CONST_INT_P (argsize))
1624 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1625 else
1626 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1627 #endif
1628 dest = gen_rtx_MEM (BLKmode, dest);
1629 set_mem_align (dest, PARM_BOUNDARY);
1630 src = gen_rtx_MEM (BLKmode, incoming_args);
1631 set_mem_align (src, PARM_BOUNDARY);
1632 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1633
1634 /* Refer to the argument block. */
1635 apply_args_size ();
1636 arguments = gen_rtx_MEM (BLKmode, arguments);
1637 set_mem_align (arguments, PARM_BOUNDARY);
1638
1639 /* Walk past the arg-pointer and structure value address. */
1640 size = GET_MODE_SIZE (Pmode);
1641 if (struct_value)
1642 size += GET_MODE_SIZE (Pmode);
1643
1644 /* Restore each of the registers previously saved. Make USE insns
1645 for each of these registers for use in making the call. */
1646 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1647 if ((mode = apply_args_mode[regno]) != VOIDmode)
1648 {
1649 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1650 if (size % align != 0)
1651 size = CEIL (size, align) * align;
1652 reg = gen_rtx_REG (mode, regno);
1653 emit_move_insn (reg, adjust_address (arguments, mode, size));
1654 use_reg (&call_fusage, reg);
1655 size += GET_MODE_SIZE (mode);
1656 }
1657
1658 /* Restore the structure value address unless this is passed as an
1659 "invisible" first argument. */
1660 size = GET_MODE_SIZE (Pmode);
1661 if (struct_value)
1662 {
1663 rtx value = gen_reg_rtx (Pmode);
1664 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1665 emit_move_insn (struct_value, value);
1666 if (REG_P (struct_value))
1667 use_reg (&call_fusage, struct_value);
1668 size += GET_MODE_SIZE (Pmode);
1669 }
1670
1671 /* All arguments and registers used for the call are set up by now! */
1672 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1673
1674   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
1675      need be done; and we don't want to load it into a register as an optimization,
1676 because prepare_call_address already did it if it should be done. */
1677 if (GET_CODE (function) != SYMBOL_REF)
1678 function = memory_address (FUNCTION_MODE, function);
1679
1680 /* Generate the actual call instruction and save the return value. */
1681 #ifdef HAVE_untyped_call
1682 if (HAVE_untyped_call)
1683 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1684 result, result_vector (1, result)));
1685 else
1686 #endif
1687 #ifdef HAVE_call_value
1688 if (HAVE_call_value)
1689 {
1690 rtx valreg = 0;
1691
1692 /* Locate the unique return register. It is not possible to
1693 express a call that sets more than one return register using
1694 call_value; use untyped_call for that. In fact, untyped_call
1695 only needs to save the return registers in the given block. */
1696 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1697 if ((mode = apply_result_mode[regno]) != VOIDmode)
1698 {
1699 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1700
1701 valreg = gen_rtx_REG (mode, regno);
1702 }
1703
1704 emit_call_insn (GEN_CALL_VALUE (valreg,
1705 gen_rtx_MEM (FUNCTION_MODE, function),
1706 const0_rtx, NULL_RTX, const0_rtx));
1707
1708 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1709 }
1710 else
1711 #endif
1712 gcc_unreachable ();
1713
1714 /* Find the CALL insn we just emitted, and attach the register usage
1715 information. */
1716 call_insn = last_call_insn ();
1717 add_function_usage_to (call_insn, call_fusage);
1718
1719 /* Restore the stack. */
1720 #ifdef HAVE_save_stack_nonlocal
1721 if (HAVE_save_stack_nonlocal)
1722 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1723 else
1724 #endif
1725 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1726 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1727
1728 OK_DEFER_POP;
1729
1730 /* Return the address of the result block. */
1731 result = copy_addr_to_reg (XEXP (result, 0));
1732 return convert_memory_address (ptr_mode, result);
1733 }
1734
1735 /* Perform an untyped return. */
1736
1737 static void
1738 expand_builtin_return (rtx result)
1739 {
1740 int size, align, regno;
1741 enum machine_mode mode;
1742 rtx reg;
1743 rtx call_fusage = 0;
1744
1745 result = convert_memory_address (Pmode, result);
1746
1747 apply_result_size ();
1748 result = gen_rtx_MEM (BLKmode, result);
1749
1750 #ifdef HAVE_untyped_return
1751 if (HAVE_untyped_return)
1752 {
1753 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1754 emit_barrier ();
1755 return;
1756 }
1757 #endif
1758
1759 /* Restore the return value and note that each value is used. */
1760 size = 0;
1761 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1762 if ((mode = apply_result_mode[regno]) != VOIDmode)
1763 {
1764 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1765 if (size % align != 0)
1766 size = CEIL (size, align) * align;
1767 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1768 emit_move_insn (reg, adjust_address (result, mode, size));
1769
1770 push_to_sequence (call_fusage);
1771 emit_use (reg);
1772 call_fusage = get_insns ();
1773 end_sequence ();
1774 size += GET_MODE_SIZE (mode);
1775 }
1776
1777 /* Put the USE insns before the return. */
1778 emit_insn (call_fusage);
1779
1780   /* Return whatever value was restored by jumping directly to the end
1781 of the function. */
1782 expand_naked_return ();
1783 }
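/* Illustrative example (not from this file): together, the three
   builtins expanded above let a wrapper forward its entire argument
   list without knowing the callee's signature.  A sketch, where the
   64-byte argument size is an assumption:

     double foo (int a, double b);

     double wrapper (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) foo, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply_args saves the incoming registers,
   expand_builtin_apply performs the untyped call, and
   expand_builtin_return restores the saved return registers and jumps
   to the end of the function.  */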
1784
1785 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1786
1787 static enum type_class
1788 type_to_class (tree type)
1789 {
1790 switch (TREE_CODE (type))
1791 {
1792 case VOID_TYPE: return void_type_class;
1793 case INTEGER_TYPE: return integer_type_class;
1794 case ENUMERAL_TYPE: return enumeral_type_class;
1795 case BOOLEAN_TYPE: return boolean_type_class;
1796 case POINTER_TYPE: return pointer_type_class;
1797 case REFERENCE_TYPE: return reference_type_class;
1798 case OFFSET_TYPE: return offset_type_class;
1799 case REAL_TYPE: return real_type_class;
1800 case COMPLEX_TYPE: return complex_type_class;
1801 case FUNCTION_TYPE: return function_type_class;
1802 case METHOD_TYPE: return method_type_class;
1803 case RECORD_TYPE: return record_type_class;
1804 case UNION_TYPE:
1805 case QUAL_UNION_TYPE: return union_type_class;
1806 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1807 ? string_type_class : array_type_class);
1808 case LANG_TYPE: return lang_type_class;
1809 default: return no_type_class;
1810 }
1811 }
1812
1813 /* Expand a call EXP to __builtin_classify_type. */
1814
1815 static rtx
1816 expand_builtin_classify_type (tree exp)
1817 {
1818 if (call_expr_nargs (exp))
1819 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1820 return GEN_INT (no_type_class);
1821 }
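/* Illustrative example (not from this file): the classification is
   purely syntactic, so e.g.

     __builtin_classify_type (42)          -> integer_type_class
     __builtin_classify_type (3.14)        -> real_type_class
     __builtin_classify_type ((void *) 0)  -> pointer_type_class

   each expands to a compile-time integer constant taken from enum
   type_class in typeclass.h.  */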
1822
1823 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1824 determines which among a set of three builtin math functions is
1825 appropriate for a given type mode. The `F' and `L' cases are
1826 automatically generated from the `double' case. */
1827 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1828 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1829 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1830 fcodel = BUILT_IN_MATHFN##L ; break;
1831 /* Similar to above, but appends _R after any F/L suffix. */
1832 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1834 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1835 fcodel = BUILT_IN_MATHFN##L_R ; break;
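/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants with a single
   switch entry in mathfn_built_in_1 below.  */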
1836
1837 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1838 if available. If IMPLICIT is true use the implicit builtin declaration,
1839 otherwise use the explicit declaration. If we can't do the conversion,
1840 return zero. */
1841
1842 static tree
1843 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1844 {
1845 enum built_in_function fcode, fcodef, fcodel, fcode2;
1846
1847 switch (fn)
1848 {
1849 CASE_MATHFN (BUILT_IN_ACOS)
1850 CASE_MATHFN (BUILT_IN_ACOSH)
1851 CASE_MATHFN (BUILT_IN_ASIN)
1852 CASE_MATHFN (BUILT_IN_ASINH)
1853 CASE_MATHFN (BUILT_IN_ATAN)
1854 CASE_MATHFN (BUILT_IN_ATAN2)
1855 CASE_MATHFN (BUILT_IN_ATANH)
1856 CASE_MATHFN (BUILT_IN_CBRT)
1857 CASE_MATHFN (BUILT_IN_CEIL)
1858 CASE_MATHFN (BUILT_IN_CEXPI)
1859 CASE_MATHFN (BUILT_IN_COPYSIGN)
1860 CASE_MATHFN (BUILT_IN_COS)
1861 CASE_MATHFN (BUILT_IN_COSH)
1862 CASE_MATHFN (BUILT_IN_DREM)
1863 CASE_MATHFN (BUILT_IN_ERF)
1864 CASE_MATHFN (BUILT_IN_ERFC)
1865 CASE_MATHFN (BUILT_IN_EXP)
1866 CASE_MATHFN (BUILT_IN_EXP10)
1867 CASE_MATHFN (BUILT_IN_EXP2)
1868 CASE_MATHFN (BUILT_IN_EXPM1)
1869 CASE_MATHFN (BUILT_IN_FABS)
1870 CASE_MATHFN (BUILT_IN_FDIM)
1871 CASE_MATHFN (BUILT_IN_FLOOR)
1872 CASE_MATHFN (BUILT_IN_FMA)
1873 CASE_MATHFN (BUILT_IN_FMAX)
1874 CASE_MATHFN (BUILT_IN_FMIN)
1875 CASE_MATHFN (BUILT_IN_FMOD)
1876 CASE_MATHFN (BUILT_IN_FREXP)
1877 CASE_MATHFN (BUILT_IN_GAMMA)
1878 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1879 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1880 CASE_MATHFN (BUILT_IN_HYPOT)
1881 CASE_MATHFN (BUILT_IN_ILOGB)
1882 CASE_MATHFN (BUILT_IN_ICEIL)
1883 CASE_MATHFN (BUILT_IN_IFLOOR)
1884 CASE_MATHFN (BUILT_IN_INF)
1885 CASE_MATHFN (BUILT_IN_IRINT)
1886 CASE_MATHFN (BUILT_IN_IROUND)
1887 CASE_MATHFN (BUILT_IN_ISINF)
1888 CASE_MATHFN (BUILT_IN_J0)
1889 CASE_MATHFN (BUILT_IN_J1)
1890 CASE_MATHFN (BUILT_IN_JN)
1891 CASE_MATHFN (BUILT_IN_LCEIL)
1892 CASE_MATHFN (BUILT_IN_LDEXP)
1893 CASE_MATHFN (BUILT_IN_LFLOOR)
1894 CASE_MATHFN (BUILT_IN_LGAMMA)
1895 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1896 CASE_MATHFN (BUILT_IN_LLCEIL)
1897 CASE_MATHFN (BUILT_IN_LLFLOOR)
1898 CASE_MATHFN (BUILT_IN_LLRINT)
1899 CASE_MATHFN (BUILT_IN_LLROUND)
1900 CASE_MATHFN (BUILT_IN_LOG)
1901 CASE_MATHFN (BUILT_IN_LOG10)
1902 CASE_MATHFN (BUILT_IN_LOG1P)
1903 CASE_MATHFN (BUILT_IN_LOG2)
1904 CASE_MATHFN (BUILT_IN_LOGB)
1905 CASE_MATHFN (BUILT_IN_LRINT)
1906 CASE_MATHFN (BUILT_IN_LROUND)
1907 CASE_MATHFN (BUILT_IN_MODF)
1908 CASE_MATHFN (BUILT_IN_NAN)
1909 CASE_MATHFN (BUILT_IN_NANS)
1910 CASE_MATHFN (BUILT_IN_NEARBYINT)
1911 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1912 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1913 CASE_MATHFN (BUILT_IN_POW)
1914 CASE_MATHFN (BUILT_IN_POWI)
1915 CASE_MATHFN (BUILT_IN_POW10)
1916 CASE_MATHFN (BUILT_IN_REMAINDER)
1917 CASE_MATHFN (BUILT_IN_REMQUO)
1918 CASE_MATHFN (BUILT_IN_RINT)
1919 CASE_MATHFN (BUILT_IN_ROUND)
1920 CASE_MATHFN (BUILT_IN_SCALB)
1921 CASE_MATHFN (BUILT_IN_SCALBLN)
1922 CASE_MATHFN (BUILT_IN_SCALBN)
1923 CASE_MATHFN (BUILT_IN_SIGNBIT)
1924 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1925 CASE_MATHFN (BUILT_IN_SIN)
1926 CASE_MATHFN (BUILT_IN_SINCOS)
1927 CASE_MATHFN (BUILT_IN_SINH)
1928 CASE_MATHFN (BUILT_IN_SQRT)
1929 CASE_MATHFN (BUILT_IN_TAN)
1930 CASE_MATHFN (BUILT_IN_TANH)
1931 CASE_MATHFN (BUILT_IN_TGAMMA)
1932 CASE_MATHFN (BUILT_IN_TRUNC)
1933 CASE_MATHFN (BUILT_IN_Y0)
1934 CASE_MATHFN (BUILT_IN_Y1)
1935 CASE_MATHFN (BUILT_IN_YN)
1936
1937 default:
1938 return NULL_TREE;
1939 }
1940
1941 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1942 fcode2 = fcode;
1943 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1944 fcode2 = fcodef;
1945 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1946 fcode2 = fcodel;
1947 else
1948 return NULL_TREE;
1949
1950 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1951 return NULL_TREE;
1952
1953 return builtin_decl_explicit (fcode2);
1954 }
1955
1956 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1957
1958 tree
1959 mathfn_built_in (tree type, enum built_in_function fn)
1960 {
1961 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1962 }
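/* Usage sketch (hypothetical caller, not from this file):

     tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   returns the decl for sinf when its implicit declaration is
   available, and NULL_TREE otherwise.  */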
1963
1964 /* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1967
1968 static void
1969 expand_errno_check (tree exp, rtx target)
1970 {
1971 rtx lab = gen_label_rtx ();
1972
1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1976 NULL_RTX, NULL_RTX, lab,
1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1979
1980 #ifdef TARGET_EDOM
1981 /* If this built-in doesn't throw an exception, set errno directly. */
1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1983 {
1984 #ifdef GEN_ERRNO_RTX
1985 rtx errno_rtx = GEN_ERRNO_RTX;
1986 #else
1987 rtx errno_rtx
1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989 #endif
1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1992 emit_label (lab);
1993 return;
1994 }
1995 #endif
1996
1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
1999
2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
2006 }
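/* Illustrative effect (a sketch of the emitted logic, not literal
   output): for

     errno = 0;
     double r = sqrt (-1.0);   (result is NaN)

   the self-comparison above relies on only a NaN comparing unequal to
   itself: any ordered result branches to LAB, while a NaN result falls
   through and errno is set to EDOM, either directly or by re-issuing
   the library call.  */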
2007
2008 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
2013
2014 static rtx
2015 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2016 {
2017 optab builtin_optab;
2018 rtx op0, insns;
2019 tree fndecl = get_callee_fndecl (exp);
2020 enum machine_mode mode;
2021 bool errno_set = false;
2022 bool try_widening = false;
2023 tree arg;
2024
2025 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2026 return NULL_RTX;
2027
2028 arg = CALL_EXPR_ARG (exp, 0);
2029
2030 switch (DECL_FUNCTION_CODE (fndecl))
2031 {
2032 CASE_FLT_FN (BUILT_IN_SQRT):
2033 errno_set = ! tree_expr_nonnegative_p (arg);
2034 try_widening = true;
2035 builtin_optab = sqrt_optab;
2036 break;
2037 CASE_FLT_FN (BUILT_IN_EXP):
2038 errno_set = true; builtin_optab = exp_optab; break;
2039 CASE_FLT_FN (BUILT_IN_EXP10):
2040 CASE_FLT_FN (BUILT_IN_POW10):
2041 errno_set = true; builtin_optab = exp10_optab; break;
2042 CASE_FLT_FN (BUILT_IN_EXP2):
2043 errno_set = true; builtin_optab = exp2_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXPM1):
2045 errno_set = true; builtin_optab = expm1_optab; break;
2046 CASE_FLT_FN (BUILT_IN_LOGB):
2047 errno_set = true; builtin_optab = logb_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOG):
2049 errno_set = true; builtin_optab = log_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG10):
2051 errno_set = true; builtin_optab = log10_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG2):
2053 errno_set = true; builtin_optab = log2_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG1P):
2055 errno_set = true; builtin_optab = log1p_optab; break;
2056 CASE_FLT_FN (BUILT_IN_ASIN):
2057 builtin_optab = asin_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ACOS):
2059 builtin_optab = acos_optab; break;
2060 CASE_FLT_FN (BUILT_IN_TAN):
2061 builtin_optab = tan_optab; break;
2062 CASE_FLT_FN (BUILT_IN_ATAN):
2063 builtin_optab = atan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_FLOOR):
2065 builtin_optab = floor_optab; break;
2066 CASE_FLT_FN (BUILT_IN_CEIL):
2067 builtin_optab = ceil_optab; break;
2068 CASE_FLT_FN (BUILT_IN_TRUNC):
2069 builtin_optab = btrunc_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ROUND):
2071 builtin_optab = round_optab; break;
2072 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2073 builtin_optab = nearbyint_optab;
2074 if (flag_trapping_math)
2075 break;
2076 /* Else fallthrough and expand as rint. */
2077 CASE_FLT_FN (BUILT_IN_RINT):
2078 builtin_optab = rint_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2080 builtin_optab = significand_optab; break;
2081 default:
2082 gcc_unreachable ();
2083 }
2084
2085 /* Make a suitable register to place result in. */
2086 mode = TYPE_MODE (TREE_TYPE (exp));
2087
2088 if (! flag_errno_math || ! HONOR_NANS (mode))
2089 errno_set = false;
2090
2091 /* Before working hard, check whether the instruction is available, but try
2092 to widen the mode for specific operations. */
2093 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2094 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2095 && (!errno_set || !optimize_insn_for_size_p ()))
2096 {
2097 rtx result = gen_reg_rtx (mode);
2098
2099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2100 need to expand the argument again. This way, we will not perform
2101 	 side-effects more than once.  */
2102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2103
2104 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2105
2106 start_sequence ();
2107
2108 /* Compute into RESULT.
2109 Set RESULT to wherever the result comes back. */
2110 result = expand_unop (mode, builtin_optab, op0, result, 0);
2111
2112 if (result != 0)
2113 {
2114 if (errno_set)
2115 expand_errno_check (exp, result);
2116
2117 /* Output the entire sequence. */
2118 insns = get_insns ();
2119 end_sequence ();
2120 emit_insn (insns);
2121 return result;
2122 }
2123
2124 /* If we were unable to expand via the builtin, stop the sequence
2125 (without outputting the insns) and call to the library function
2126 with the stabilized argument list. */
2127 end_sequence ();
2128 }
2129
2130 return expand_call (exp, target, target == const0_rtx);
2131 }
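/* Illustrative effect (assuming a target that provides sqrt_optab):

     double f (double x) { return __builtin_sqrt (x); }

   compiled with -fno-math-errno expands to a single hardware square
   root via the optab; when errno must be honored, the expansion is
   followed by the NaN/EDOM check above, or falls back to a library
   call when optimizing for size.  */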
2132
2133 /* Expand a call to the builtin binary math functions (pow and atan2).
2134 Return NULL_RTX if a normal call should be emitted rather than expanding the
2135 function in-line. EXP is the expression that is a call to the builtin
2136 function; if convenient, the result should be placed in TARGET.
2137 SUBTARGET may be used as the target for computing one of EXP's
2138 operands. */
2139
2140 static rtx
2141 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2142 {
2143 optab builtin_optab;
2144 rtx op0, op1, insns, result;
2145 int op1_type = REAL_TYPE;
2146 tree fndecl = get_callee_fndecl (exp);
2147 tree arg0, arg1;
2148 enum machine_mode mode;
2149 bool errno_set = true;
2150
2151 switch (DECL_FUNCTION_CODE (fndecl))
2152 {
2153 CASE_FLT_FN (BUILT_IN_SCALBN):
2154 CASE_FLT_FN (BUILT_IN_SCALBLN):
2155 CASE_FLT_FN (BUILT_IN_LDEXP):
2156 op1_type = INTEGER_TYPE;
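      /* FALLTHRU */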
2157 default:
2158 break;
2159 }
2160
2161 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2162 return NULL_RTX;
2163
2164 arg0 = CALL_EXPR_ARG (exp, 0);
2165 arg1 = CALL_EXPR_ARG (exp, 1);
2166
2167 switch (DECL_FUNCTION_CODE (fndecl))
2168 {
2169 CASE_FLT_FN (BUILT_IN_POW):
2170 builtin_optab = pow_optab; break;
2171 CASE_FLT_FN (BUILT_IN_ATAN2):
2172 builtin_optab = atan2_optab; break;
2173 CASE_FLT_FN (BUILT_IN_SCALB):
2174 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2175 return 0;
2176 builtin_optab = scalb_optab; break;
2177 CASE_FLT_FN (BUILT_IN_SCALBN):
2178 CASE_FLT_FN (BUILT_IN_SCALBLN):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 /* Fall through... */
2182 CASE_FLT_FN (BUILT_IN_LDEXP):
2183 builtin_optab = ldexp_optab; break;
2184 CASE_FLT_FN (BUILT_IN_FMOD):
2185 builtin_optab = fmod_optab; break;
2186 CASE_FLT_FN (BUILT_IN_REMAINDER):
2187 CASE_FLT_FN (BUILT_IN_DREM):
2188 builtin_optab = remainder_optab; break;
2189 default:
2190 gcc_unreachable ();
2191 }
2192
2193 /* Make a suitable register to place result in. */
2194 mode = TYPE_MODE (TREE_TYPE (exp));
2195
2196 /* Before working hard, check whether the instruction is available. */
2197 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2198 return NULL_RTX;
2199
2200 result = gen_reg_rtx (mode);
2201
2202 if (! flag_errno_math || ! HONOR_NANS (mode))
2203 errno_set = false;
2204
2205 if (errno_set && optimize_insn_for_size_p ())
2206 return 0;
2207
2208 /* Always stabilize the argument list. */
2209 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2210 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2211
2212 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2213 op1 = expand_normal (arg1);
2214
2215 start_sequence ();
2216
2217 /* Compute into RESULT.
2218 Set RESULT to wherever the result comes back. */
2219 result = expand_binop (mode, builtin_optab, op0, op1,
2220 result, 0, OPTAB_DIRECT);
2221
2222 /* If we were unable to expand via the builtin, stop the sequence
2223 (without outputting the insns) and call to the library function
2224 with the stabilized argument list. */
2225 if (result == 0)
2226 {
2227 end_sequence ();
2228 return expand_call (exp, target, target == const0_rtx);
2229 }
2230
2231 if (errno_set)
2232 expand_errno_check (exp, result);
2233
2234 /* Output the entire sequence. */
2235 insns = get_insns ();
2236 end_sequence ();
2237 emit_insn (insns);
2238
2239 return result;
2240 }
2241
2242 /* Expand a call to the builtin trinary math functions (fma).
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2245 function; if convenient, the result should be placed in TARGET.
2246 SUBTARGET may be used as the target for computing one of EXP's
2247 operands. */
2248
2249 static rtx
2250 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2251 {
2252 optab builtin_optab;
2253 rtx op0, op1, op2, insns, result;
2254 tree fndecl = get_callee_fndecl (exp);
2255 tree arg0, arg1, arg2;
2256 enum machine_mode mode;
2257
2258 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2259 return NULL_RTX;
2260
2261 arg0 = CALL_EXPR_ARG (exp, 0);
2262 arg1 = CALL_EXPR_ARG (exp, 1);
2263 arg2 = CALL_EXPR_ARG (exp, 2);
2264
2265 switch (DECL_FUNCTION_CODE (fndecl))
2266 {
2267 CASE_FLT_FN (BUILT_IN_FMA):
2268 builtin_optab = fma_optab; break;
2269 default:
2270 gcc_unreachable ();
2271 }
2272
2273 /* Make a suitable register to place result in. */
2274 mode = TYPE_MODE (TREE_TYPE (exp));
2275
2276 /* Before working hard, check whether the instruction is available. */
2277 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2278 return NULL_RTX;
2279
2280 result = gen_reg_rtx (mode);
2281
2282 /* Always stabilize the argument list. */
2283 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2284 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2285 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2286
2287 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2288 op1 = expand_normal (arg1);
2289 op2 = expand_normal (arg2);
2290
2291 start_sequence ();
2292
2293 /* Compute into RESULT.
2294 Set RESULT to wherever the result comes back. */
2295 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2296 result, 0);
2297
2298 /* If we were unable to expand via the builtin, stop the sequence
2299 (without outputting the insns) and call to the library function
2300 with the stabilized argument list. */
2301 if (result == 0)
2302 {
2303 end_sequence ();
2304 return expand_call (exp, target, target == const0_rtx);
2305 }
2306
2307 /* Output the entire sequence. */
2308 insns = get_insns ();
2309 end_sequence ();
2310 emit_insn (insns);
2311
2312 return result;
2313 }
2314
2315 /* Expand a call to the builtin sin and cos math functions.
2316 Return NULL_RTX if a normal call should be emitted rather than expanding the
2317 function in-line. EXP is the expression that is a call to the builtin
2318 function; if convenient, the result should be placed in TARGET.
2319 SUBTARGET may be used as the target for computing one of EXP's
2320 operands. */
2321
2322 static rtx
2323 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2324 {
2325 optab builtin_optab;
2326 rtx op0, insns;
2327 tree fndecl = get_callee_fndecl (exp);
2328 enum machine_mode mode;
2329 tree arg;
2330
2331 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2332 return NULL_RTX;
2333
2334 arg = CALL_EXPR_ARG (exp, 0);
2335
2336 switch (DECL_FUNCTION_CODE (fndecl))
2337 {
2338 CASE_FLT_FN (BUILT_IN_SIN):
2339 CASE_FLT_FN (BUILT_IN_COS):
2340 builtin_optab = sincos_optab; break;
2341 default:
2342 gcc_unreachable ();
2343 }
2344
2345 /* Make a suitable register to place result in. */
2346 mode = TYPE_MODE (TREE_TYPE (exp));
2347
2348   /* Check if the sincos insn is available; otherwise fall back
2349      to the sin or cos insn.  */
2350 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2351 switch (DECL_FUNCTION_CODE (fndecl))
2352 {
2353 CASE_FLT_FN (BUILT_IN_SIN):
2354 builtin_optab = sin_optab; break;
2355 CASE_FLT_FN (BUILT_IN_COS):
2356 builtin_optab = cos_optab; break;
2357 default:
2358 gcc_unreachable ();
2359 }
2360
2361 /* Before working hard, check whether the instruction is available. */
2362 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2363 {
2364 rtx result = gen_reg_rtx (mode);
2365
2366 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2367 need to expand the argument again. This way, we will not perform
2368 	 side-effects more than once.  */
2369 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2370
2371 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2372
2373 start_sequence ();
2374
2375 /* Compute into RESULT.
2376 Set RESULT to wherever the result comes back. */
2377 if (builtin_optab == sincos_optab)
2378 {
2379 int ok;
2380
2381 switch (DECL_FUNCTION_CODE (fndecl))
2382 {
2383 CASE_FLT_FN (BUILT_IN_SIN):
2384 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2385 break;
2386 CASE_FLT_FN (BUILT_IN_COS):
2387 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2388 break;
2389 default:
2390 gcc_unreachable ();
2391 }
2392 gcc_assert (ok);
2393 }
2394 else
2395 result = expand_unop (mode, builtin_optab, op0, result, 0);
2396
2397 if (result != 0)
2398 {
2399 /* Output the entire sequence. */
2400 insns = get_insns ();
2401 end_sequence ();
2402 emit_insn (insns);
2403 return result;
2404 }
2405
2406 /* If we were unable to expand via the builtin, stop the sequence
2407 (without outputting the insns) and call to the library function
2408 with the stabilized argument list. */
2409 end_sequence ();
2410 }
2411
2412 return expand_call (exp, target, target == const0_rtx);
2413 }
2414
2415 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2416 return an RTL instruction code that implements the functionality.
2417 If that isn't possible or available return CODE_FOR_nothing. */
2418
2419 static enum insn_code
2420 interclass_mathfn_icode (tree arg, tree fndecl)
2421 {
2422 bool errno_set = false;
2423 optab builtin_optab = unknown_optab;
2424 enum machine_mode mode;
2425
2426 switch (DECL_FUNCTION_CODE (fndecl))
2427 {
2428 CASE_FLT_FN (BUILT_IN_ILOGB):
2429 errno_set = true; builtin_optab = ilogb_optab; break;
2430 CASE_FLT_FN (BUILT_IN_ISINF):
2431 builtin_optab = isinf_optab; break;
2432 case BUILT_IN_ISNORMAL:
2433 case BUILT_IN_ISFINITE:
2434 CASE_FLT_FN (BUILT_IN_FINITE):
2435 case BUILT_IN_FINITED32:
2436 case BUILT_IN_FINITED64:
2437 case BUILT_IN_FINITED128:
2438 case BUILT_IN_ISINFD32:
2439 case BUILT_IN_ISINFD64:
2440 case BUILT_IN_ISINFD128:
2441 /* These builtins have no optabs (yet). */
2442 break;
2443 default:
2444 gcc_unreachable ();
2445 }
2446
2447 /* There's no easy way to detect the case we need to set EDOM. */
2448 if (flag_errno_math && errno_set)
2449 return CODE_FOR_nothing;
2450
2451 /* Optab mode depends on the mode of the input argument. */
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2453
2454 if (builtin_optab)
2455 return optab_handler (builtin_optab, mode);
2456 return CODE_FOR_nothing;
2457 }
2458
2459 /* Expand a call to one of the builtin math functions that operate on
2460    a floating point argument and output an integer result (ilogb, isinf,
2461    isnan, etc.).
2462 Return 0 if a normal call should be emitted rather than expanding the
2463 function in-line. EXP is the expression that is a call to the builtin
2464 function; if convenient, the result should be placed in TARGET. */
2465
2466 static rtx
2467 expand_builtin_interclass_mathfn (tree exp, rtx target)
2468 {
2469 enum insn_code icode = CODE_FOR_nothing;
2470 rtx op0;
2471 tree fndecl = get_callee_fndecl (exp);
2472 enum machine_mode mode;
2473 tree arg;
2474
2475 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2476 return NULL_RTX;
2477
2478 arg = CALL_EXPR_ARG (exp, 0);
2479 icode = interclass_mathfn_icode (arg, fndecl);
2480 mode = TYPE_MODE (TREE_TYPE (arg));
2481
2482 if (icode != CODE_FOR_nothing)
2483 {
2484 struct expand_operand ops[1];
2485 rtx last = get_last_insn ();
2486 tree orig_arg = arg;
2487
2488 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2489 need to expand the argument again. This way, we will not perform
2490 	 side-effects more than once.  */
2491 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2492
2493 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2494
2495 if (mode != GET_MODE (op0))
2496 op0 = convert_to_mode (mode, op0, 0);
2497
2498 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2499 if (maybe_legitimize_operands (icode, 0, 1, ops)
2500 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2501 return ops[0].value;
2502
2503 delete_insns_since (last);
2504 CALL_EXPR_ARG (exp, 0) = orig_arg;
2505 }
2506
2507 return NULL_RTX;
2508 }
2509
2510 /* Expand a call to the builtin sincos math function.
2511 Return NULL_RTX if a normal call should be emitted rather than expanding the
2512 function in-line. EXP is the expression that is a call to the builtin
2513 function. */
2514
2515 static rtx
2516 expand_builtin_sincos (tree exp)
2517 {
2518 rtx op0, op1, op2, target1, target2;
2519 enum machine_mode mode;
2520 tree arg, sinp, cosp;
2521 int result;
2522 location_t loc = EXPR_LOCATION (exp);
2523 tree alias_type, alias_off;
2524
2525 if (!validate_arglist (exp, REAL_TYPE,
2526 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2527 return NULL_RTX;
2528
2529 arg = CALL_EXPR_ARG (exp, 0);
2530 sinp = CALL_EXPR_ARG (exp, 1);
2531 cosp = CALL_EXPR_ARG (exp, 2);
2532
2533 /* Make a suitable register to place result in. */
2534 mode = TYPE_MODE (TREE_TYPE (arg));
2535
2536 /* Check if sincos insn is available, otherwise emit the call. */
2537 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2538 return NULL_RTX;
2539
2540 target1 = gen_reg_rtx (mode);
2541 target2 = gen_reg_rtx (mode);
2542
2543 op0 = expand_normal (arg);
2544 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2545 alias_off = build_int_cst (alias_type, 0);
2546 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2547 sinp, alias_off));
2548 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2549 cosp, alias_off));
2550
2551 /* Compute into target1 and target2.
2552 Set TARGET to wherever the result comes back. */
2553 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2554 gcc_assert (result);
2555
2556 /* Move target1 and target2 to the memory locations indicated
2557 by op1 and op2. */
2558 emit_move_insn (op1, target1);
2559 emit_move_insn (op2, target2);
2560
2561 return const0_rtx;
2562 }
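/* Illustrative example (not from this file):

     void polar (double t, double *s, double *c)
     {
       sincos (t, s, c);
     }

   expands to a single sincos insn when sincos_optab is available for
   DFmode, computing both values in one operation instead of separate
   sin and cos calls.  */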
2563
2564 /* Expand a call to the internal cexpi builtin to the sincos math function.
2565 EXP is the expression that is a call to the builtin function; if convenient,
2566 the result should be placed in TARGET. */
2567
2568 static rtx
2569 expand_builtin_cexpi (tree exp, rtx target)
2570 {
2571 tree fndecl = get_callee_fndecl (exp);
2572 tree arg, type;
2573 enum machine_mode mode;
2574 rtx op0, op1, op2;
2575 location_t loc = EXPR_LOCATION (exp);
2576
2577 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2578 return NULL_RTX;
2579
2580 arg = CALL_EXPR_ARG (exp, 0);
2581 type = TREE_TYPE (arg);
2582 mode = TYPE_MODE (TREE_TYPE (arg));
2583
2584 /* Try expanding via a sincos optab, fall back to emitting a libcall
2585      to sincos or cexp.  We are sure one of them exists because cexpi
2586      is only generated from sincos or cexp, or when either is available.  */
2587 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2588 {
2589 op1 = gen_reg_rtx (mode);
2590 op2 = gen_reg_rtx (mode);
2591
2592 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2593
2594 /* Compute into op1 and op2. */
2595 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2596 }
2597 else if (targetm.libc_has_function (function_sincos))
2598 {
2599 tree call, fn = NULL_TREE;
2600 tree top1, top2;
2601 rtx op1a, op2a;
2602
2603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2604 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2606 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2609 else
2610 gcc_unreachable ();
2611
2612 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2613 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2614 op1a = copy_addr_to_reg (XEXP (op1, 0));
2615 op2a = copy_addr_to_reg (XEXP (op2, 0));
2616 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2617 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2618
2619 /* Make sure not to fold the sincos call again. */
2620 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2621 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2622 call, 3, arg, top1, top2));
2623 }
2624 else
2625 {
2626 tree call, fn = NULL_TREE, narg;
2627 tree ctype = build_complex_type (type);
2628
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2635 else
2636 gcc_unreachable ();
2637
2638 /* If we don't have a decl for cexp create one. This is the
2639 friendliest fallback if the user calls __builtin_cexpi
2640 	 on a target without full C99 function support.  */
2641 if (fn == NULL_TREE)
2642 {
2643 tree fntype;
2644 const char *name = NULL;
2645
2646 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2647 name = "cexpf";
2648 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2649 name = "cexp";
2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2651 name = "cexpl";
2652
2653 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2654 fn = build_fn_decl (name, fntype);
2655 }
2656
2657 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2658 build_real (type, dconst0), arg);
2659
2660 /* Make sure not to fold the cexp call again. */
2661 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2662 return expand_expr (build_call_nary (ctype, call, 1, narg),
2663 target, VOIDmode, EXPAND_NORMAL);
2664 }
2665
2666 /* Now build the proper return type. */
2667 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2668 make_tree (TREE_TYPE (arg), op2),
2669 make_tree (TREE_TYPE (arg), op1)),
2670 target, VOIDmode, EXPAND_NORMAL);
2671 }
2672
2673 /* Conveniently construct a function call expression. FNDECL names the
2674 function to be called, N is the number of arguments, and the "..."
2675    parameters are the argument expressions.  Unlike build_call_expr
2676 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2677
2678 static tree
2679 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2680 {
2681 va_list ap;
2682 tree fntype = TREE_TYPE (fndecl);
2683 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2684
2685 va_start (ap, n);
2686 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2687 va_end (ap);
2688 SET_EXPR_LOCATION (fn, loc);
2689 return fn;
2690 }
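/* Usage sketch (hypothetical arguments): given FN, the decl of some
   library function, and a location LOC,

     tree call = build_call_nofold_loc (loc, fn, 2, arg0, arg1);

   yields a bare CALL_EXPR.  Skipping the fold step matters when an
   expander lowers one builtin to another and must not recurse back
   into the folders.  */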
2691
2692 /* Expand a call to one of the builtin rounding functions gcc defines
2693 as an extension (lfloor and lceil). As these are gcc extensions we
2694 do not need to worry about setting errno to EDOM.
2695 If expanding via optab fails, lower expression to (int)(floor(x)).
2696 EXP is the expression that is a call to the builtin function;
2697 if convenient, the result should be placed in TARGET. */
2698
2699 static rtx
2700 expand_builtin_int_roundingfn (tree exp, rtx target)
2701 {
2702 convert_optab builtin_optab;
2703 rtx op0, insns, tmp;
2704 tree fndecl = get_callee_fndecl (exp);
2705 enum built_in_function fallback_fn;
2706 tree fallback_fndecl;
2707 enum machine_mode mode;
2708 tree arg;
2709
2710 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2711 gcc_unreachable ();
2712
2713 arg = CALL_EXPR_ARG (exp, 0);
2714
2715 switch (DECL_FUNCTION_CODE (fndecl))
2716 {
2717 CASE_FLT_FN (BUILT_IN_ICEIL):
2718 CASE_FLT_FN (BUILT_IN_LCEIL):
2719 CASE_FLT_FN (BUILT_IN_LLCEIL):
2720 builtin_optab = lceil_optab;
2721 fallback_fn = BUILT_IN_CEIL;
2722 break;
2723
2724 CASE_FLT_FN (BUILT_IN_IFLOOR):
2725 CASE_FLT_FN (BUILT_IN_LFLOOR):
2726 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2727 builtin_optab = lfloor_optab;
2728 fallback_fn = BUILT_IN_FLOOR;
2729 break;
2730
2731 default:
2732 gcc_unreachable ();
2733 }
2734
2735 /* Make a suitable register to place result in. */
2736 mode = TYPE_MODE (TREE_TYPE (exp));
2737
2738 target = gen_reg_rtx (mode);
2739
2740 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2741 need to expand the argument again. This way, we will not perform
2742      side-effects more than once.  */
2743 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2744
2745 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2746
2747 start_sequence ();
2748
2749 /* Compute into TARGET. */
2750 if (expand_sfix_optab (target, op0, builtin_optab))
2751 {
2752 /* Output the entire sequence. */
2753 insns = get_insns ();
2754 end_sequence ();
2755 emit_insn (insns);
2756 return target;
2757 }
2758
2759 /* If we were unable to expand via the builtin, stop the sequence
2760 (without outputting the insns). */
2761 end_sequence ();
2762
2763 /* Fall back to floating point rounding optab. */
2764 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2765
2766 /* For non-C99 targets we may end up without a fallback fndecl here
2767 if the user called __builtin_lfloor directly. In this case emit
2768 a call to the floor/ceil variants nevertheless. This should result
2769      in the best user experience for targets without full C99 support.  */
2770 if (fallback_fndecl == NULL_TREE)
2771 {
2772 tree fntype;
2773 const char *name = NULL;
2774
2775 switch (DECL_FUNCTION_CODE (fndecl))
2776 {
2777 case BUILT_IN_ICEIL:
2778 case BUILT_IN_LCEIL:
2779 case BUILT_IN_LLCEIL:
2780 name = "ceil";
2781 break;
2782 case BUILT_IN_ICEILF:
2783 case BUILT_IN_LCEILF:
2784 case BUILT_IN_LLCEILF:
2785 name = "ceilf";
2786 break;
2787 case BUILT_IN_ICEILL:
2788 case BUILT_IN_LCEILL:
2789 case BUILT_IN_LLCEILL:
2790 name = "ceill";
2791 break;
2792 case BUILT_IN_IFLOOR:
2793 case BUILT_IN_LFLOOR:
2794 case BUILT_IN_LLFLOOR:
2795 name = "floor";
2796 break;
2797 case BUILT_IN_IFLOORF:
2798 case BUILT_IN_LFLOORF:
2799 case BUILT_IN_LLFLOORF:
2800 name = "floorf";
2801 break;
2802 case BUILT_IN_IFLOORL:
2803 case BUILT_IN_LFLOORL:
2804 case BUILT_IN_LLFLOORL:
2805 name = "floorl";
2806 break;
2807 default:
2808 gcc_unreachable ();
2809 }
2810
2811 fntype = build_function_type_list (TREE_TYPE (arg),
2812 TREE_TYPE (arg), NULL_TREE);
2813 fallback_fndecl = build_fn_decl (name, fntype);
2814 }
2815
2816 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2817
2818 tmp = expand_normal (exp);
2819 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2820
2821 /* Truncate the result of floating point optab to integer
2822 via expand_fix (). */
2823 target = gen_reg_rtx (mode);
2824 expand_fix (target, tmp, 0);
2825
2826 return target;
2827 }
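/* Illustrative fallback (a sketch): on a target without lceil_optab /
   lfloor_optab,

     long f (double x) { return __builtin_lfloor (x); }

   is lowered roughly to

     long f (double x) { return (long) floor (x); }

   i.e. a call to the floating point rounding function followed by
   expand_fix ().  */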
2828
2829 /* Expand a call to one of the builtin math functions doing integer
2830 conversion (lrint).
2831 Return 0 if a normal call should be emitted rather than expanding the
2832 function in-line. EXP is the expression that is a call to the builtin
2833 function; if convenient, the result should be placed in TARGET. */
2834
2835 static rtx
2836 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2837 {
2838 convert_optab builtin_optab;
2839 rtx op0, insns;
2840 tree fndecl = get_callee_fndecl (exp);
2841 tree arg;
2842 enum machine_mode mode;
2843 enum built_in_function fallback_fn = BUILT_IN_NONE;
2844
2845 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2846 gcc_unreachable ();
2847
2848 arg = CALL_EXPR_ARG (exp, 0);
2849
2850 switch (DECL_FUNCTION_CODE (fndecl))
2851 {
2852 CASE_FLT_FN (BUILT_IN_IRINT):
2853 fallback_fn = BUILT_IN_LRINT;
2854 /* FALLTHRU */
2855 CASE_FLT_FN (BUILT_IN_LRINT):
2856 CASE_FLT_FN (BUILT_IN_LLRINT):
2857 builtin_optab = lrint_optab;
2858 break;
2859
2860 CASE_FLT_FN (BUILT_IN_IROUND):
2861 fallback_fn = BUILT_IN_LROUND;
2862 /* FALLTHRU */
2863 CASE_FLT_FN (BUILT_IN_LROUND):
2864 CASE_FLT_FN (BUILT_IN_LLROUND):
2865 builtin_optab = lround_optab;
2866 break;
2867
2868 default:
2869 gcc_unreachable ();
2870 }
2871
2872 /* There's no easy way to detect the case we need to set EDOM. */
2873 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2874 return NULL_RTX;
2875
2876 /* Make a suitable register to place result in. */
2877 mode = TYPE_MODE (TREE_TYPE (exp));
2878
2879   /* If errno handling is not required, try expanding via the optab.  */
2880 if (!flag_errno_math)
2881 {
2882 rtx result = gen_reg_rtx (mode);
2883
2884 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2885 need to expand the argument again. This way, we will not perform
2886 	 side-effects more than once.  */
2887 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2888
2889 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2890
2891 start_sequence ();
2892
2893 if (expand_sfix_optab (result, op0, builtin_optab))
2894 {
2895 /* Output the entire sequence. */
2896 insns = get_insns ();
2897 end_sequence ();
2898 emit_insn (insns);
2899 return result;
2900 }
2901
2902 /* If we were unable to expand via the builtin, stop the sequence
2903 (without outputting the insns) and call to the library function
2904 with the stabilized argument list. */
2905 end_sequence ();
2906 }
2907
2908 if (fallback_fn != BUILT_IN_NONE)
2909 {
2910       /* Fall back to rounding to long int.  Use implicit_p 0: for non-C99
2911 targets, (int) round (x) should never be transformed into
2912 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2913 a call to lround in the hope that the target provides at least some
2914 C99 functions. This should result in the best user experience for
2915 	 targets without full C99 support.  */
2916 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2917 fallback_fn, 0);
2918
2919 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2920 fallback_fndecl, 1, arg);
2921
2922 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2923 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2924 return convert_to_mode (mode, target, 0);
2925 }
2926
2927 return expand_call (exp, target, target == const0_rtx);
2928 }
2929
2930 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2931 a normal call should be emitted rather than expanding the function
2932 in-line. EXP is the expression that is a call to the builtin
2933 function; if convenient, the result should be placed in TARGET. */
2934
2935 static rtx
2936 expand_builtin_powi (tree exp, rtx target)
2937 {
2938 tree arg0, arg1;
2939 rtx op0, op1;
2940 enum machine_mode mode;
2941 enum machine_mode mode2;
2942
2943 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2944 return NULL_RTX;
2945
2946 arg0 = CALL_EXPR_ARG (exp, 0);
2947 arg1 = CALL_EXPR_ARG (exp, 1);
2948 mode = TYPE_MODE (TREE_TYPE (exp));
2949
2950 /* Emit a libcall to libgcc. */
2951
2952 /* Mode of the 2nd argument must match that of an int. */
2953 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2954
2955 if (target == NULL_RTX)
2956 target = gen_reg_rtx (mode);
2957
2958 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2959 if (GET_MODE (op0) != mode)
2960 op0 = convert_to_mode (mode, op0, 0);
2961 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2962 if (GET_MODE (op1) != mode2)
2963 op1 = convert_to_mode (mode2, op1, 0);
2964
2965 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2966 target, LCT_CONST, mode, 2,
2967 op0, mode, op1, mode2);
2968
2969 return target;
2970 }
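/* Illustrative example: for double,

     double f (double x, int n) { return __builtin_powi (x, n); }

   emits a call to the libgcc routine __powidf2, the powi_optab
   libfunc for DFmode.  */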
2971
2972 /* Expand expression EXP which is a call to the strlen builtin. Return
2973    NULL_RTX if we failed; the caller should emit a normal call, otherwise
2974 try to get the result in TARGET, if convenient. */
2975
2976 static rtx
2977 expand_builtin_strlen (tree exp, rtx target,
2978 enum machine_mode target_mode)
2979 {
2980 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2981 return NULL_RTX;
2982 else
2983 {
2984 struct expand_operand ops[4];
2985 rtx pat;
2986 tree len;
2987 tree src = CALL_EXPR_ARG (exp, 0);
2988 rtx src_reg, before_strlen;
2989 enum machine_mode insn_mode = target_mode;
2990 enum insn_code icode = CODE_FOR_nothing;
2991 unsigned int align;
2992
2993 /* If the length can be computed at compile-time, return it. */
2994 len = c_strlen (src, 0);
2995 if (len)
2996 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2997
2998       /* If the length can be computed at compile-time and is a constant
2999 integer, but there are side-effects in src, evaluate
3000 src for side-effects, then return len.
3001 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3002 can be optimized into: i++; x = 3; */
3003 len = c_strlen (src, 1);
3004 if (len && TREE_CODE (len) == INTEGER_CST)
3005 {
3006 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3007 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3008 }
3009
3010 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3011
3012 /* If SRC is not a pointer type, don't do this operation inline. */
3013 if (align == 0)
3014 return NULL_RTX;
3015
3016 /* Bail out if we can't compute strlen in the right mode. */
3017 while (insn_mode != VOIDmode)
3018 {
3019 icode = optab_handler (strlen_optab, insn_mode);
3020 if (icode != CODE_FOR_nothing)
3021 break;
3022
3023 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3024 }
3025 if (insn_mode == VOIDmode)
3026 return NULL_RTX;
3027
3028 /* Make a place to hold the source address. We will not expand
3029 the actual source until we are sure that the expansion will
3030 not fail -- there are trees that cannot be expanded twice. */
3031 src_reg = gen_reg_rtx (Pmode);
3032
3033 /* Mark the beginning of the strlen sequence so we can emit the
3034 source operand later. */
3035 before_strlen = get_last_insn ();
3036
3037 create_output_operand (&ops[0], target, insn_mode);
3038 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3039 create_integer_operand (&ops[2], 0);
3040 create_integer_operand (&ops[3], align);
3041 if (!maybe_expand_insn (icode, 4, ops))
3042 return NULL_RTX;
3043
3044 /* Now that we are assured of success, expand the source. */
3045 start_sequence ();
3046 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3047 if (pat != src_reg)
3048 {
3049 #ifdef POINTERS_EXTEND_UNSIGNED
3050 if (GET_MODE (pat) != Pmode)
3051 pat = convert_to_mode (Pmode, pat,
3052 POINTERS_EXTEND_UNSIGNED);
3053 #endif
3054 emit_move_insn (src_reg, pat);
3055 }
3056 pat = get_insns ();
3057 end_sequence ();
3058
3059 if (before_strlen)
3060 emit_insn_after (pat, before_strlen);
3061 else
3062 emit_insn_before (pat, get_insns ());
3063
3064 /* Return the value in the proper mode for this function. */
3065 if (GET_MODE (ops[0].value) == target_mode)
3066 target = ops[0].value;
3067 else if (target != 0)
3068 convert_move (target, ops[0].value, 0);
3069 else
3070 target = convert_to_mode (target_mode, ops[0].value, 0);
3071
3072 return target;
3073 }
3074 }
3075
3076 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3077 bytes from constant string DATA + OFFSET and return it as target
3078 constant. */
3079
3080 static rtx
3081 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3082 enum machine_mode mode)
3083 {
3084 const char *str = (const char *) data;
3085
3086 gcc_assert (offset >= 0
3087 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3088 <= strlen (str) + 1));
3089
3090 return c_readstr (str + offset, mode);
3091 }
3092
3093 /* LEN specifies the length of the block of the memcpy/memset operation.
3094    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3095    In some cases we can make a very likely guess on the max size, which we
3096    then store into PROBABLE_MAX_SIZE.  */
3097
3098 static void
3099 determine_block_size (tree len, rtx len_rtx,
3100 unsigned HOST_WIDE_INT *min_size,
3101 unsigned HOST_WIDE_INT *max_size,
3102 unsigned HOST_WIDE_INT *probable_max_size)
3103 {
3104 if (CONST_INT_P (len_rtx))
3105 {
3106 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3107 return;
3108 }
3109 else
3110 {
3111 wide_int min, max;
3112 enum value_range_type range_type = VR_UNDEFINED;
3113
3114 /* Determine bounds from the type. */
3115 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3116 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3117 else
3118 *min_size = 0;
3119 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3120 *probable_max_size = *max_size
3121 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3122 else
3123 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3124
3125 if (TREE_CODE (len) == SSA_NAME)
3126 range_type = get_range_info (len, &min, &max);
3127 if (range_type == VR_RANGE)
3128 {
3129 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3130 *min_size = min.to_uhwi ();
3131 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3132 *probable_max_size = *max_size = max.to_uhwi ();
3133 }
3134 else if (range_type == VR_ANTI_RANGE)
3135 {
3136 	  /* An anti range 0...N lets us determine that the minimal size is N+1.  */
3137 if (min == 0)
3138 {
3139 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3140 *min_size = max.to_uhwi () + 1;
3141 }
3142 /* Code like
3143
3144 int n;
3145 if (n < 100)
3146 memcpy (a, b, n)
3147
3148 	     produces an anti range allowing negative values of N.  We can
3149 	     still use that information to guess that N is not negative.
3150 */
3151 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3152 *probable_max_size = min.to_uhwi () - 1;
3153 }
3154 }
3155 gcc_checking_assert (*max_size <=
3156 (unsigned HOST_WIDE_INT)
3157 GET_MODE_MASK (GET_MODE (len_rtx)));
3158 }
3159
3160 /* Expand a call EXP to the memcpy builtin.
3161    Return NULL_RTX if we failed; the caller should emit a normal call,
3162 otherwise try to get the result in TARGET, if convenient (and in
3163 mode MODE if that's convenient). */
3164
3165 static rtx
3166 expand_builtin_memcpy (tree exp, rtx target)
3167 {
3168 if (!validate_arglist (exp,
3169 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3170 return NULL_RTX;
3171 else
3172 {
3173 tree dest = CALL_EXPR_ARG (exp, 0);
3174 tree src = CALL_EXPR_ARG (exp, 1);
3175 tree len = CALL_EXPR_ARG (exp, 2);
3176 const char *src_str;
3177 unsigned int src_align = get_pointer_alignment (src);
3178 unsigned int dest_align = get_pointer_alignment (dest);
3179 rtx dest_mem, src_mem, dest_addr, len_rtx;
3180 HOST_WIDE_INT expected_size = -1;
3181 unsigned int expected_align = 0;
3182 unsigned HOST_WIDE_INT min_size;
3183 unsigned HOST_WIDE_INT max_size;
3184 unsigned HOST_WIDE_INT probable_max_size;
3185
3186 /* If DEST is not a pointer type, call the normal function. */
3187 if (dest_align == 0)
3188 return NULL_RTX;
3189
3190 /* If either SRC is not a pointer type, don't do this
3191 operation in-line. */
3192 if (src_align == 0)
3193 return NULL_RTX;
3194
3195 if (currently_expanding_gimple_stmt)
3196 stringop_block_profile (currently_expanding_gimple_stmt,
3197 &expected_align, &expected_size);
3198
3199 if (expected_align < dest_align)
3200 expected_align = dest_align;
3201 dest_mem = get_memory_rtx (dest, len);
3202 set_mem_align (dest_mem, dest_align);
3203 len_rtx = expand_normal (len);
3204 determine_block_size (len, len_rtx, &min_size, &max_size,
3205 &probable_max_size);
3206 src_str = c_getstr (src);
3207
3208 /* If SRC is a string constant and block move would be done
3209 by pieces, we can avoid loading the string from memory
3210 	 and only store the computed constants.  */
3211 if (src_str
3212 && CONST_INT_P (len_rtx)
3213 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3214 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3215 CONST_CAST (char *, src_str),
3216 dest_align, false))
3217 {
3218 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3219 builtin_memcpy_read_str,
3220 CONST_CAST (char *, src_str),
3221 dest_align, false, 0);
3222 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3223 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3224 return dest_mem;
3225 }
3226
3227 src_mem = get_memory_rtx (src, len);
3228 set_mem_align (src_mem, src_align);
3229
3230 /* Copy word part most expediently. */
3231 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3232 CALL_EXPR_TAILCALL (exp)
3233 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3234 expected_align, expected_size,
3235 min_size, max_size, probable_max_size);
3236
3237 if (dest_addr == 0)
3238 {
3239 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3240 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3241 }
3242 return dest_addr;
3243 }
3244 }
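/* Illustrative example (not from this file): with a constant length
   and a string constant source,

     memcpy (buf, "hi", 3);

   can be expanded by store_by_pieces into a few immediate stores,
   never loading the literal from memory; otherwise the block move
   machinery, or ultimately a library call, does the copy.  */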
3245
3246 /* Expand a call EXP to the mempcpy builtin.
3247 Return NULL_RTX if we failed; the caller should emit a normal call,
3248 otherwise try to get the result in TARGET, if convenient (and in
3249 mode MODE if that's convenient). If ENDP is 0 return the
3250 destination pointer, if ENDP is 1 return the end pointer ala
3251 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3252 stpcpy. */
3253
3254 static rtx
3255 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3256 {
3257 if (!validate_arglist (exp,
3258 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3259 return NULL_RTX;
3260 else
3261 {
3262 tree dest = CALL_EXPR_ARG (exp, 0);
3263 tree src = CALL_EXPR_ARG (exp, 1);
3264 tree len = CALL_EXPR_ARG (exp, 2);
3265 return expand_builtin_mempcpy_args (dest, src, len,
3266 target, mode, /*endp=*/ 1);
3267 }
3268 }
3269
3270 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3271 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3272 so that this can also be called without constructing an actual CALL_EXPR.
3273 The other arguments and return value are the same as for
3274 expand_builtin_mempcpy. */
3275
3276 static rtx
3277 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3278 rtx target, enum machine_mode mode, int endp)
3279 {
3280 /* If return value is ignored, transform mempcpy into memcpy. */
3281 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3282 {
3283 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3284 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3285 dest, src, len);
3286 return expand_expr (result, target, mode, EXPAND_NORMAL);
3287 }
3288 else
3289 {
3290 const char *src_str;
3291 unsigned int src_align = get_pointer_alignment (src);
3292 unsigned int dest_align = get_pointer_alignment (dest);
3293 rtx dest_mem, src_mem, len_rtx;
3294
3295 /* If either SRC or DEST is not a pointer type, don't do this
3296 operation in-line. */
3297 if (dest_align == 0 || src_align == 0)
3298 return NULL_RTX;
3299
3300 /* If LEN is not constant, call the normal function. */
3301 if (! tree_fits_uhwi_p (len))
3302 return NULL_RTX;
3303
3304 len_rtx = expand_normal (len);
3305 src_str = c_getstr (src);
3306
3307 /* If SRC is a string constant and block move would be done
3308 by pieces, we can avoid loading the string from memory
3309 and only store the computed constants. */
3310 if (src_str
3311 && CONST_INT_P (len_rtx)
3312 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3313 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3314 CONST_CAST (char *, src_str),
3315 dest_align, false))
3316 {
3317 dest_mem = get_memory_rtx (dest, len);
3318 set_mem_align (dest_mem, dest_align);
3319 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3320 builtin_memcpy_read_str,
3321 CONST_CAST (char *, src_str),
3322 dest_align, false, endp);
3323 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3324 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3325 return dest_mem;
3326 }
3327
3328 if (CONST_INT_P (len_rtx)
3329 && can_move_by_pieces (INTVAL (len_rtx),
3330 MIN (dest_align, src_align)))
3331 {
3332 dest_mem = get_memory_rtx (dest, len);
3333 set_mem_align (dest_mem, dest_align);
3334 src_mem = get_memory_rtx (src, len);
3335 set_mem_align (src_mem, src_align);
3336 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3337 MIN (dest_align, src_align), endp);
3338 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3339 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3340 return dest_mem;
3341 }
3342
3343 return NULL_RTX;
3344 }
3345 }
3346
3347 #ifndef HAVE_movstr
3348 # define HAVE_movstr 0
3349 # define CODE_FOR_movstr CODE_FOR_nothing
3350 #endif
3351
3352 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3353 if we failed; the caller should emit a normal call. Otherwise try to
3354 get the result in TARGET, if convenient. If ENDP is 0 return the
3355 destination pointer, if ENDP is 1 return the end pointer ala
3356 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3357 stpcpy. */
3358
3359 static rtx
3360 expand_movstr (tree dest, tree src, rtx target, int endp)
3361 {
3362 struct expand_operand ops[3];
3363 rtx dest_mem;
3364 rtx src_mem;
3365
3366 if (!HAVE_movstr)
3367 return NULL_RTX;
3368
3369 dest_mem = get_memory_rtx (dest, NULL);
3370 src_mem = get_memory_rtx (src, NULL);
3371 if (!endp)
3372 {
3373 target = force_reg (Pmode, XEXP (dest_mem, 0));
3374 dest_mem = replace_equiv_address (dest_mem, target);
3375 }
3376
3377 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3378 create_fixed_operand (&ops[1], dest_mem);
3379 create_fixed_operand (&ops[2], src_mem);
3380 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3381 return NULL_RTX;
3382
3383 if (endp && target != const0_rtx)
3384 {
3385 target = ops[0].value;
3386 /* movstr is supposed to set end to the address of the NUL
3387 terminator. If the caller requested a mempcpy-like return value,
3388 adjust it. */
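	  /* Sketch: after copying "abc", movstr's END operand points at
	     &dst[3] (the copied NUL), whereas mempcpy (dst, src, 4) must
	     return &dst[4]; hence the plus_constant adjustment below.  */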
3389 if (endp == 1)
3390 {
3391 rtx tem = plus_constant (GET_MODE (target),
3392 gen_lowpart (GET_MODE (target), target), 1);
3393 emit_move_insn (target, force_operand (tem, NULL_RTX));
3394 }
3395 }
3396 return target;
3397 }
3398
3399 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3400 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3401 try to get the result in TARGET, if convenient (and in mode MODE if that's
3402 convenient). */
3403
3404 static rtx
3405 expand_builtin_strcpy (tree exp, rtx target)
3406 {
3407 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3408 {
3409 tree dest = CALL_EXPR_ARG (exp, 0);
3410 tree src = CALL_EXPR_ARG (exp, 1);
3411 return expand_builtin_strcpy_args (dest, src, target);
3412 }
3413 return NULL_RTX;
3414 }
3415
3416 /* Helper function to do the actual work for expand_builtin_strcpy. The
3417 arguments to the builtin_strcpy call DEST and SRC are broken out
3418 so that this can also be called without constructing an actual CALL_EXPR.
3419 The other arguments and return value are the same as for
3420 expand_builtin_strcpy. */
3421
3422 static rtx
3423 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3424 {
3425 return expand_movstr (dest, src, target, /*endp=*/0);
3426 }
3427
3428 /* Expand a call EXP to the stpcpy builtin.
3429 Return NULL_RTX if we failed; the caller should emit a normal call;
3430 otherwise try to get the result in TARGET, if convenient (and in
3431 mode MODE if that's convenient). */
3432
3433 static rtx
3434 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3435 {
3436 tree dst, src;
3437 location_t loc = EXPR_LOCATION (exp);
3438
3439 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3440 return NULL_RTX;
3441
3442 dst = CALL_EXPR_ARG (exp, 0);
3443 src = CALL_EXPR_ARG (exp, 1);
3444
3445 /* If return value is ignored, transform stpcpy into strcpy. */
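  /* Sketch: "(void) stpcpy (d, s);" behaves exactly like
     "strcpy (d, s);", so the cheaper strcpy call is emitted.  */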
3446 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3447 {
3448 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3449 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3450 return expand_expr (result, target, mode, EXPAND_NORMAL);
3451 }
3452 else
3453 {
3454 tree len, lenp1;
3455 rtx ret;
3456
3457 /* Ensure we get an actual string whose length can be evaluated at
3458 compile-time, not an expression containing a string. This is
3459 because the latter will potentially produce pessimized code
3460 when used to produce the return value. */
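      /* Sketch: for SRC == "abc", LEN is 3 and LENP1 is 4; the mempcpy
	 expansion below then returns DST + 3, the address of the copied
	 NUL, which is exactly stpcpy's contract.  */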
3461 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3462 return expand_movstr (dst, src, target, /*endp=*/2);
3463
3464 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3465 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3466 target, mode, /*endp=*/2);
3467
3468 if (ret)
3469 return ret;
3470
3471 if (TREE_CODE (len) == INTEGER_CST)
3472 {
3473 rtx len_rtx = expand_normal (len);
3474
3475 if (CONST_INT_P (len_rtx))
3476 {
3477 ret = expand_builtin_strcpy_args (dst, src, target);
3478
3479 if (ret)
3480 {
3481 if (! target)
3482 {
3483 if (mode != VOIDmode)
3484 target = gen_reg_rtx (mode);
3485 else
3486 target = gen_reg_rtx (GET_MODE (ret));
3487 }
3488 if (GET_MODE (target) != GET_MODE (ret))
3489 ret = gen_lowpart (GET_MODE (target), ret);
3490
3491 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3492 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3493 gcc_assert (ret);
3494
3495 return target;
3496 }
3497 }
3498 }
3499
3500 return expand_movstr (dst, src, target, /*endp=*/2);
3501 }
3502 }
3503
3504 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3505 bytes from constant string DATA + OFFSET and return it as target
3506 constant. */
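/* Sketch: with DATA == "ab" and a 4-byte MODE, OFFSET 0 reads the
   bytes { 'a', 'b', 0, 0 }, and any OFFSET past the terminator reads
   all zeros -- exactly the padding strncpy requires.  */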
3507
3508 rtx
3509 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3510 enum machine_mode mode)
3511 {
3512 const char *str = (const char *) data;
3513
3514 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3515 return const0_rtx;
3516
3517 return c_readstr (str + offset, mode);
3518 }
3519
3520 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3521 NULL_RTX if we failed; the caller should emit a normal call. */
3522
3523 static rtx
3524 expand_builtin_strncpy (tree exp, rtx target)
3525 {
3526 location_t loc = EXPR_LOCATION (exp);
3527
3528 if (validate_arglist (exp,
3529 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3530 {
3531 tree dest = CALL_EXPR_ARG (exp, 0);
3532 tree src = CALL_EXPR_ARG (exp, 1);
3533 tree len = CALL_EXPR_ARG (exp, 2);
3534 tree slen = c_strlen (src, 1);
3535
3536 /* We must be passed constant LEN and SRC parameters. */
3537 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3538 return NULL_RTX;
3539
3540 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3541
3542 /* We're required to pad with trailing zeros if the requested
3543 len is greater than strlen(s2)+1. In that case try to
3544 use store_by_pieces; if that fails, punt. */
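	  /* E.g. (sketch): strncpy (dst, "ab", 8) must store
	     { 'a', 'b', 0, 0, 0, 0, 0, 0 } into dst.  */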
3545 if (tree_int_cst_lt (slen, len))
3546 {
3547 unsigned int dest_align = get_pointer_alignment (dest);
3548 const char *p = c_getstr (src);
3549 rtx dest_mem;
3550
3551 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3552 || !can_store_by_pieces (tree_to_uhwi (len),
3553 builtin_strncpy_read_str,
3554 CONST_CAST (char *, p),
3555 dest_align, false))
3556 return NULL_RTX;
3557
3558 dest_mem = get_memory_rtx (dest, len);
3559 store_by_pieces (dest_mem, tree_to_uhwi (len),
3560 builtin_strncpy_read_str,
3561 CONST_CAST (char *, p), dest_align, false, 0);
3562 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3565 }
3566 }
3567 return NULL_RTX;
3568 }
3569
3570 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3571 bytes from constant string DATA + OFFSET and return it as target
3572 constant. */
3573
3574 rtx
3575 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3576 enum machine_mode mode)
3577 {
3578 const char *c = (const char *) data;
3579 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3580
3581 memset (p, *c, GET_MODE_SIZE (mode));
3582
3583 return c_readstr (p, mode);
3584 }
3585
3586 /* Callback routine for store_by_pieces. Return the RTL of a register
3587 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3588 char value given in the RTL register data. For example, if mode is
3589 4 bytes wide, return the RTL for 0x01010101*data. */
3590
3591 static rtx
3592 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3593 enum machine_mode mode)
3594 {
3595 rtx target, coeff;
3596 size_t size;
3597 char *p;
3598
3599 size = GET_MODE_SIZE (mode);
3600 if (size == 1)
3601 return (rtx) data;
3602
3603 p = XALLOCAVEC (char, size);
3604 memset (p, 1, size);
3605 coeff = c_readstr (p, mode);
3606
3607 target = convert_to_mode (mode, (rtx) data, 1);
3608 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3609 return force_reg (mode, target);
3610 }
3611
3612 /* Expand expression EXP, which is a call to the memset builtin. Return
3613 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3614 try to get the result in TARGET, if convenient (and in mode MODE if that's
3615 convenient). */
3616
3617 static rtx
3618 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3619 {
3620 if (!validate_arglist (exp,
3621 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3622 return NULL_RTX;
3623 else
3624 {
3625 tree dest = CALL_EXPR_ARG (exp, 0);
3626 tree val = CALL_EXPR_ARG (exp, 1);
3627 tree len = CALL_EXPR_ARG (exp, 2);
3628 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3629 }
3630 }
3631
3632 /* Helper function to do the actual work for expand_builtin_memset. The
3633 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3634 so that this can also be called without constructing an actual CALL_EXPR.
3635 The other arguments and return value are the same as for
3636 expand_builtin_memset. */
3637
3638 static rtx
3639 expand_builtin_memset_args (tree dest, tree val, tree len,
3640 rtx target, enum machine_mode mode, tree orig_exp)
3641 {
3642 tree fndecl, fn;
3643 enum built_in_function fcode;
3644 enum machine_mode val_mode;
3645 char c;
3646 unsigned int dest_align;
3647 rtx dest_mem, dest_addr, len_rtx;
3648 HOST_WIDE_INT expected_size = -1;
3649 unsigned int expected_align = 0;
3650 unsigned HOST_WIDE_INT min_size;
3651 unsigned HOST_WIDE_INT max_size;
3652 unsigned HOST_WIDE_INT probable_max_size;
3653
3654 dest_align = get_pointer_alignment (dest);
3655
3656 /* If DEST is not a pointer type, don't do this operation in-line. */
3657 if (dest_align == 0)
3658 return NULL_RTX;
3659
3660 if (currently_expanding_gimple_stmt)
3661 stringop_block_profile (currently_expanding_gimple_stmt,
3662 &expected_align, &expected_size);
3663
3664 if (expected_align < dest_align)
3665 expected_align = dest_align;
3666
3667 /* If the LEN parameter is zero, return DEST. */
3668 if (integer_zerop (len))
3669 {
3670 /* Evaluate and ignore VAL in case it has side-effects. */
3671 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3672 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3673 }
3674
3675 /* Stabilize the arguments in case we fail. */
3676 dest = builtin_save_expr (dest);
3677 val = builtin_save_expr (val);
3678 len = builtin_save_expr (len);
3679
3680 len_rtx = expand_normal (len);
3681 determine_block_size (len, len_rtx, &min_size, &max_size,
3682 &probable_max_size);
3683 dest_mem = get_memory_rtx (dest, len);
3684 val_mode = TYPE_MODE (unsigned_char_type_node);
3685
3686 if (TREE_CODE (val) != INTEGER_CST)
3687 {
3688 rtx val_rtx;
3689
3690 val_rtx = expand_normal (val);
3691 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3692
3693 /* Assume that we can memset by pieces if we can store
3694 the coefficients by pieces (in the required modes).
3695 We can't pass builtin_memset_gen_str as that emits RTL. */
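      /* I.e. feasibility is probed with the pure reader callback and a
	 dummy byte, and the real store is then done with the RTL-emitting
	 generator once storing by pieces is known to be possible.  */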
3696 c = 1;
3697 if (tree_fits_uhwi_p (len)
3698 && can_store_by_pieces (tree_to_uhwi (len),
3699 builtin_memset_read_str, &c, dest_align,
3700 true))
3701 {
3702 val_rtx = force_reg (val_mode, val_rtx);
3703 store_by_pieces (dest_mem, tree_to_uhwi (len),
3704 builtin_memset_gen_str, val_rtx, dest_align,
3705 true, 0);
3706 }
3707 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3708 dest_align, expected_align,
3709 expected_size, min_size, max_size,
3710 probable_max_size))
3711 goto do_libcall;
3712
3713 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3714 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3715 return dest_mem;
3716 }
3717
3718 if (target_char_cast (val, &c))
3719 goto do_libcall;
3720
3721 if (c)
3722 {
3723 if (tree_fits_uhwi_p (len)
3724 && can_store_by_pieces (tree_to_uhwi (len),
3725 builtin_memset_read_str, &c, dest_align,
3726 true))
3727 store_by_pieces (dest_mem, tree_to_uhwi (len),
3728 builtin_memset_read_str, &c, dest_align, true, 0);
3729 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3730 gen_int_mode (c, val_mode),
3731 dest_align, expected_align,
3732 expected_size, min_size, max_size,
3733 probable_max_size))
3734 goto do_libcall;
3735
3736 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3737 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3738 return dest_mem;
3739 }
3740
3741 set_mem_align (dest_mem, dest_align);
3742 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3743 CALL_EXPR_TAILCALL (orig_exp)
3744 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3745 expected_align, expected_size,
3746 min_size, max_size,
3747 probable_max_size);
3748
3749 if (dest_addr == 0)
3750 {
3751 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3752 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3753 }
3754
3755 return dest_addr;
3756
3757 do_libcall:
3758 fndecl = get_callee_fndecl (orig_exp);
3759 fcode = DECL_FUNCTION_CODE (fndecl);
3760 if (fcode == BUILT_IN_MEMSET)
3761 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3762 dest, val, len);
3763 else if (fcode == BUILT_IN_BZERO)
3764 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3765 dest, len);
3766 else
3767 gcc_unreachable ();
3768 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3769 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3770 return expand_call (fn, target, target == const0_rtx);
3771 }
3772
3773 /* Expand expression EXP, which is a call to the bzero builtin. Return
3774 NULL_RTX if we failed; the caller should emit a normal call. */
3775
3776 static rtx
3777 expand_builtin_bzero (tree exp)
3778 {
3779 tree dest, size;
3780 location_t loc = EXPR_LOCATION (exp);
3781
3782 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3783 return NULL_RTX;
3784
3785 dest = CALL_EXPR_ARG (exp, 0);
3786 size = CALL_EXPR_ARG (exp, 1);
3787
3788 /* New argument list transforming bzero(ptr x, int y) to
3789 memset(ptr x, int 0, size_t y). This is done this way
3790 so that if it isn't expanded inline, we fall back to
3791 calling bzero instead of memset. */
3792
3793 return expand_builtin_memset_args (dest, integer_zero_node,
3794 fold_convert_loc (loc,
3795 size_type_node, size),
3796 const0_rtx, VOIDmode, exp);
3797 }
3798
3799 /* Expand expression EXP, which is a call to the memcmp built-in function.
3800 Return NULL_RTX if we failed and the caller should emit a normal call,
3801 otherwise try to get the result in TARGET, if convenient (and in mode
3802 MODE, if that's convenient). */
3803
3804 static rtx
3805 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3806 ATTRIBUTE_UNUSED enum machine_mode mode)
3807 {
3808 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3809
3810 if (!validate_arglist (exp,
3811 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3812 return NULL_RTX;
3813
3814 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3815 implementing memcmp because it will stop if it encounters two
3816 zero bytes. */
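/* (Sketch: memcmp ("a\0x", "a\0y", 3) must return nonzero, whereas a
   string-style compare would stop at the embedded NULs and wrongly
   report equality.)  */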
3817 #if defined HAVE_cmpmemsi
3818 {
3819 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3820 rtx result;
3821 rtx insn;
3822 tree arg1 = CALL_EXPR_ARG (exp, 0);
3823 tree arg2 = CALL_EXPR_ARG (exp, 1);
3824 tree len = CALL_EXPR_ARG (exp, 2);
3825
3826 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3827 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3828 enum machine_mode insn_mode;
3829
3830 if (HAVE_cmpmemsi)
3831 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3832 else
3833 return NULL_RTX;
3834
3835 /* If we don't have POINTER_TYPE, call the function. */
3836 if (arg1_align == 0 || arg2_align == 0)
3837 return NULL_RTX;
3838
3839 /* Make a place to write the result of the instruction. */
3840 result = target;
3841 if (! (result != 0
3842 && REG_P (result) && GET_MODE (result) == insn_mode
3843 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3844 result = gen_reg_rtx (insn_mode);
3845
3846 arg1_rtx = get_memory_rtx (arg1, len);
3847 arg2_rtx = get_memory_rtx (arg2, len);
3848 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3849
3850 /* Set MEM_SIZE as appropriate. */
3851 if (CONST_INT_P (arg3_rtx))
3852 {
3853 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3854 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3855 }
3856
3857 if (HAVE_cmpmemsi)
3858 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3859 GEN_INT (MIN (arg1_align, arg2_align)));
3860 else
3861 gcc_unreachable ();
3862
3863 if (insn)
3864 emit_insn (insn);
3865 else
3866 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3867 TYPE_MODE (integer_type_node), 3,
3868 XEXP (arg1_rtx, 0), Pmode,
3869 XEXP (arg2_rtx, 0), Pmode,
3870 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3871 TYPE_UNSIGNED (sizetype)),
3872 TYPE_MODE (sizetype));
3873
3874 /* Return the value in the proper mode for this function. */
3875 mode = TYPE_MODE (TREE_TYPE (exp));
3876 if (GET_MODE (result) == mode)
3877 return result;
3878 else if (target != 0)
3879 {
3880 convert_move (target, result, 0);
3881 return target;
3882 }
3883 else
3884 return convert_to_mode (mode, result, 0);
3885 }
3886 #endif /* HAVE_cmpmemsi. */
3887
3888 return NULL_RTX;
3889 }
3890
3891 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3892 if we failed; the caller should emit a normal call. Otherwise try to get
3893 the result in TARGET, if convenient. */
3894
3895 static rtx
3896 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3897 {
3898 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3899 return NULL_RTX;
3900
3901 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3902 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3903 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3904 {
3905 rtx arg1_rtx, arg2_rtx;
3906 rtx result, insn = NULL_RTX;
3907 tree fndecl, fn;
3908 tree arg1 = CALL_EXPR_ARG (exp, 0);
3909 tree arg2 = CALL_EXPR_ARG (exp, 1);
3910
3911 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3912 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3913
3914 /* If we don't have POINTER_TYPE, call the function. */
3915 if (arg1_align == 0 || arg2_align == 0)
3916 return NULL_RTX;
3917
3918 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3919 arg1 = builtin_save_expr (arg1);
3920 arg2 = builtin_save_expr (arg2);
3921
3922 arg1_rtx = get_memory_rtx (arg1, NULL);
3923 arg2_rtx = get_memory_rtx (arg2, NULL);
3924
3925 #ifdef HAVE_cmpstrsi
3926 /* Try to call cmpstrsi. */
3927 if (HAVE_cmpstrsi)
3928 {
3929 enum machine_mode insn_mode
3930 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3931
3932 /* Make a place to write the result of the instruction. */
3933 result = target;
3934 if (! (result != 0
3935 && REG_P (result) && GET_MODE (result) == insn_mode
3936 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3937 result = gen_reg_rtx (insn_mode);
3938
3939 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3940 GEN_INT (MIN (arg1_align, arg2_align)));
3941 }
3942 #endif
3943 #ifdef HAVE_cmpstrnsi
3944 /* Try to determine at least one length and call cmpstrnsi. */
3945 if (!insn && HAVE_cmpstrnsi)
3946 {
3947 tree len;
3948 rtx arg3_rtx;
3949
3950 enum machine_mode insn_mode
3951 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3952 tree len1 = c_strlen (arg1, 1);
3953 tree len2 = c_strlen (arg2, 1);
3954
3955 if (len1)
3956 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3957 if (len2)
3958 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3959
3960 /* If we don't have a constant length for the first, use the length
3961 of the second, if we know it. We don't require a constant for
3962 this case; some cost analysis could be done if both are available
3963 but neither is constant. For now, assume they're equally cheap,
3964 unless one has side effects. If both strings have constant lengths,
3965 use the smaller. */
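	  /* Worked example (sketch): for strcmp (s, "abc"), LEN2 is 4
	     (strlen + 1) while LEN1 is unknown, so LEN becomes 4 and the
	     comparison needs to examine at most four bytes.  */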
3966
3967 if (!len1)
3968 len = len2;
3969 else if (!len2)
3970 len = len1;
3971 else if (TREE_SIDE_EFFECTS (len1))
3972 len = len2;
3973 else if (TREE_SIDE_EFFECTS (len2))
3974 len = len1;
3975 else if (TREE_CODE (len1) != INTEGER_CST)
3976 len = len2;
3977 else if (TREE_CODE (len2) != INTEGER_CST)
3978 len = len1;
3979 else if (tree_int_cst_lt (len1, len2))
3980 len = len1;
3981 else
3982 len = len2;
3983
3984 /* If both arguments have side effects, we cannot optimize. */
3985 if (!len || TREE_SIDE_EFFECTS (len))
3986 goto do_libcall;
3987
3988 arg3_rtx = expand_normal (len);
3989
3990 /* Make a place to write the result of the instruction. */
3991 result = target;
3992 if (! (result != 0
3993 && REG_P (result) && GET_MODE (result) == insn_mode
3994 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3995 result = gen_reg_rtx (insn_mode);
3996
3997 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3998 GEN_INT (MIN (arg1_align, arg2_align)));
3999 }
4000 #endif
4001
4002 if (insn)
4003 {
4004 enum machine_mode mode;
4005 emit_insn (insn);
4006
4007 /* Return the value in the proper mode for this function. */
4008 mode = TYPE_MODE (TREE_TYPE (exp));
4009 if (GET_MODE (result) == mode)
4010 return result;
4011 if (target == 0)
4012 return convert_to_mode (mode, result, 0);
4013 convert_move (target, result, 0);
4014 return target;
4015 }
4016
4017 /* Expand the library call ourselves using a stabilized argument
4018 list to avoid re-evaluating the function's arguments twice. */
4019 #ifdef HAVE_cmpstrnsi
4020 do_libcall:
4021 #endif
4022 fndecl = get_callee_fndecl (exp);
4023 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4024 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4025 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4026 return expand_call (fn, target, target == const0_rtx);
4027 }
4028 #endif
4029 return NULL_RTX;
4030 }
4031
4032 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4033 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4034 try to get the result in TARGET, if convenient. */
4035
4036 static rtx
4037 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4038 ATTRIBUTE_UNUSED enum machine_mode mode)
4039 {
4040 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4041
4042 if (!validate_arglist (exp,
4043 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4044 return NULL_RTX;
4045
4046 /* If c_strlen can determine an expression for one of the string
4047 lengths, and it doesn't have side effects, then emit cmpstrnsi
4048 using length MIN(strlen(string)+1, arg3). */
4049 #ifdef HAVE_cmpstrnsi
4050 if (HAVE_cmpstrnsi)
4051 {
4052 tree len, len1, len2;
4053 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4054 rtx result, insn;
4055 tree fndecl, fn;
4056 tree arg1 = CALL_EXPR_ARG (exp, 0);
4057 tree arg2 = CALL_EXPR_ARG (exp, 1);
4058 tree arg3 = CALL_EXPR_ARG (exp, 2);
4059
4060 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4061 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4062 enum machine_mode insn_mode
4063 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4064
4065 len1 = c_strlen (arg1, 1);
4066 len2 = c_strlen (arg2, 1);
4067
4068 if (len1)
4069 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4070 if (len2)
4071 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4072
4073 /* If we don't have a constant length for the first, use the length
4074 of the second, if we know it. We don't require a constant for
4075 this case; some cost analysis could be done if both are available
4076 but neither is constant. For now, assume they're equally cheap,
4077 unless one has side effects. If both strings have constant lengths,
4078 use the smaller. */
4079
4080 if (!len1)
4081 len = len2;
4082 else if (!len2)
4083 len = len1;
4084 else if (TREE_SIDE_EFFECTS (len1))
4085 len = len2;
4086 else if (TREE_SIDE_EFFECTS (len2))
4087 len = len1;
4088 else if (TREE_CODE (len1) != INTEGER_CST)
4089 len = len2;
4090 else if (TREE_CODE (len2) != INTEGER_CST)
4091 len = len1;
4092 else if (tree_int_cst_lt (len1, len2))
4093 len = len1;
4094 else
4095 len = len2;
4096
4097 /* If both arguments have side effects, we cannot optimize. */
4098 if (!len || TREE_SIDE_EFFECTS (len))
4099 return NULL_RTX;
4100
4101 /* The actual new length parameter is MIN(len,arg3). */
4102 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4103 fold_convert_loc (loc, TREE_TYPE (len), arg3));
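      /* Sketch: for strncmp (s, "abc", 2), LEN starts as 4 (strlen + 1)
	 and MIN (4, 2) == 2, so at most two bytes are compared.  */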
4104
4105 /* If we don't have POINTER_TYPE, call the function. */
4106 if (arg1_align == 0 || arg2_align == 0)
4107 return NULL_RTX;
4108
4109 /* Make a place to write the result of the instruction. */
4110 result = target;
4111 if (! (result != 0
4112 && REG_P (result) && GET_MODE (result) == insn_mode
4113 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4114 result = gen_reg_rtx (insn_mode);
4115
4116 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4117 arg1 = builtin_save_expr (arg1);
4118 arg2 = builtin_save_expr (arg2);
4119 len = builtin_save_expr (len);
4120
4121 arg1_rtx = get_memory_rtx (arg1, len);
4122 arg2_rtx = get_memory_rtx (arg2, len);
4123 arg3_rtx = expand_normal (len);
4124 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4125 GEN_INT (MIN (arg1_align, arg2_align)));
4126 if (insn)
4127 {
4128 emit_insn (insn);
4129
4130 /* Return the value in the proper mode for this function. */
4131 mode = TYPE_MODE (TREE_TYPE (exp));
4132 if (GET_MODE (result) == mode)
4133 return result;
4134 if (target == 0)
4135 return convert_to_mode (mode, result, 0);
4136 convert_move (target, result, 0);
4137 return target;
4138 }
4139
4140 /* Expand the library call ourselves using a stabilized argument
4141 list to avoid re-evaluating the function's arguments twice. */
4142 fndecl = get_callee_fndecl (exp);
4143 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4144 arg1, arg2, len);
4145 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4146 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4147 return expand_call (fn, target, target == const0_rtx);
4148 }
4149 #endif
4150 return NULL_RTX;
4151 }
4152
4153 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4154 if that's convenient. */
4155
4156 rtx
4157 expand_builtin_saveregs (void)
4158 {
4159 rtx val, seq;
4160
4161 /* Don't do __builtin_saveregs more than once in a function.
4162 Save the result of the first call and reuse it. */
4163 if (saveregs_value != 0)
4164 return saveregs_value;
4165
4166 /* When this function is called, it means that registers must be
4167 saved on entry to this function. So we migrate the call to the
4168 first insn of this function. */
4169
4170 start_sequence ();
4171
4172 /* Do whatever the machine needs done in this case. */
4173 val = targetm.calls.expand_builtin_saveregs ();
4174
4175 seq = get_insns ();
4176 end_sequence ();
4177
4178 saveregs_value = val;
4179
4180 /* Put the insns after the NOTE that starts the function. If this
4181 is inside a start_sequence, make the outer-level insn chain current, so
4182 the code is placed at the start of the function. */
4183 push_topmost_sequence ();
4184 emit_insn_after (seq, entry_of_function ());
4185 pop_topmost_sequence ();
4186
4187 return val;
4188 }
4189
4190 /* Expand a call to __builtin_next_arg. */
4191
4192 static rtx
4193 expand_builtin_next_arg (void)
4194 {
4195 /* Checking arguments is already done in fold_builtin_next_arg
4196 that must be called before this function. */
4197 return expand_binop (ptr_mode, add_optab,
4198 crtl->args.internal_arg_pointer,
4199 crtl->args.arg_offset_rtx,
4200 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4201 }
4202
4203 /* Make it easier for the backends by protecting the valist argument
4204 from multiple evaluations. */
4205
4206 static tree
4207 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4208 {
4209 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4210
4211 /* The current way of determining the type of valist is completely
4212 bogus. We should have the information on the va builtin instead. */
4213 if (!vatype)
4214 vatype = targetm.fn_abi_va_list (cfun->decl);
4215
4216 if (TREE_CODE (vatype) == ARRAY_TYPE)
4217 {
4218 if (TREE_SIDE_EFFECTS (valist))
4219 valist = save_expr (valist);
4220
4221 /* For this case, the backends will be expecting a pointer to
4222 vatype, but it's possible we've actually been given an array
4223 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4224 So fix it. */
4225 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4226 {
4227 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4228 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4229 }
4230 }
4231 else
4232 {
4233 tree pt = build_pointer_type (vatype);
4234
4235 if (! needs_lvalue)
4236 {
4237 if (! TREE_SIDE_EFFECTS (valist))
4238 return valist;
4239
4240 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4241 TREE_SIDE_EFFECTS (valist) = 1;
4242 }
4243
4244 if (TREE_SIDE_EFFECTS (valist))
4245 valist = save_expr (valist);
4246 valist = fold_build2_loc (loc, MEM_REF,
4247 vatype, valist, build_int_cst (pt, 0));
4248 }
4249
4250 return valist;
4251 }
4252
4253 /* The "standard" definition of va_list is void*. */
4254
4255 tree
4256 std_build_builtin_va_list (void)
4257 {
4258 return ptr_type_node;
4259 }
4260
4261 /* The "standard" abi va_list is va_list_type_node. */
4262
4263 tree
4264 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4265 {
4266 return va_list_type_node;
4267 }
4268
4269 /* The "standard" type of va_list is va_list_type_node. */
4270
4271 tree
4272 std_canonical_va_list_type (tree type)
4273 {
4274 tree wtype, htype;
4275
4276 if (INDIRECT_REF_P (type))
4277 type = TREE_TYPE (type);
4278 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4279 type = TREE_TYPE (type);
4280 wtype = va_list_type_node;
4281 htype = type;
4282 /* Treat structure va_list types. */
4283 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4284 htype = TREE_TYPE (htype);
4285 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4286 {
4287 /* If va_list is an array type, the argument may have decayed
4288 to a pointer type, e.g. by being passed to another function.
4289 In that case, unwrap both types so that we can compare the
4290 underlying records. */
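	  /* E.g. (sketch): on a target where va_list is "struct tag[1]",
	     a valist passed to another function arrives as "struct tag *";
	     both sides unwrap to the same RECORD_TYPE.  */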
4291 if (TREE_CODE (htype) == ARRAY_TYPE
4292 || POINTER_TYPE_P (htype))
4293 {
4294 wtype = TREE_TYPE (wtype);
4295 htype = TREE_TYPE (htype);
4296 }
4297 }
4298 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4299 return va_list_type_node;
4300
4301 return NULL_TREE;
4302 }
4303
4304 /* The "standard" implementation of va_start: just assign `nextarg' to
4305 the variable. */
4306
4307 void
4308 std_expand_builtin_va_start (tree valist, rtx nextarg)
4309 {
4310 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4311 convert_move (va_r, nextarg, 0);
4312 }
4313
4314 /* Expand EXP, a call to __builtin_va_start. */
4315
4316 static rtx
4317 expand_builtin_va_start (tree exp)
4318 {
4319 rtx nextarg;
4320 tree valist;
4321 location_t loc = EXPR_LOCATION (exp);
4322
4323 if (call_expr_nargs (exp) < 2)
4324 {
4325 error_at (loc, "too few arguments to function %<va_start%>");
4326 return const0_rtx;
4327 }
4328
4329 if (fold_builtin_next_arg (exp, true))
4330 return const0_rtx;
4331
4332 nextarg = expand_builtin_next_arg ();
4333 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4334
4335 if (targetm.expand_builtin_va_start)
4336 targetm.expand_builtin_va_start (valist, nextarg);
4337 else
4338 std_expand_builtin_va_start (valist, nextarg);
4339
4340 return const0_rtx;
4341 }
4342
4343 /* Expand EXP, a call to __builtin_va_end. */
4344
4345 static rtx
4346 expand_builtin_va_end (tree exp)
4347 {
4348 tree valist = CALL_EXPR_ARG (exp, 0);
4349
4350 /* Evaluate for side effects, if needed. I hate macros that don't
4351 do that. */
4352 if (TREE_SIDE_EFFECTS (valist))
4353 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4354
4355 return const0_rtx;
4356 }
4357
4358 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4359 builtin rather than just as an assignment in stdarg.h because of the
4360 nastiness of array-type va_list types. */
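/* Sketch: with a pointer-style va_list, "va_copy (d, s)" degenerates
   to the assignment "d = s"; with an array-style va_list the whole
   underlying record must be copied, which the block-move branch of
   the function below performs.  */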
4361
4362 static rtx
4363 expand_builtin_va_copy (tree exp)
4364 {
4365 tree dst, src, t;
4366 location_t loc = EXPR_LOCATION (exp);
4367
4368 dst = CALL_EXPR_ARG (exp, 0);
4369 src = CALL_EXPR_ARG (exp, 1);
4370
4371 dst = stabilize_va_list_loc (loc, dst, 1);
4372 src = stabilize_va_list_loc (loc, src, 0);
4373
4374 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4375
4376 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4377 {
4378 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4379 TREE_SIDE_EFFECTS (t) = 1;
4380 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4381 }
4382 else
4383 {
4384 rtx dstb, srcb, size;
4385
4386 /* Evaluate to pointers. */
4387 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4388 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4389 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4390 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4391
4392 dstb = convert_memory_address (Pmode, dstb);
4393 srcb = convert_memory_address (Pmode, srcb);
4394
4395 /* "Dereference" to BLKmode memories. */
4396 dstb = gen_rtx_MEM (BLKmode, dstb);
4397 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4398 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4399 srcb = gen_rtx_MEM (BLKmode, srcb);
4400 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4401 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4402
4403 /* Copy. */
4404 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4405 }
4406
4407 return const0_rtx;
4408 }
4409
4410 /* Expand a call to one of the builtin functions __builtin_frame_address or
4411 __builtin_return_address. */
4412
4413 static rtx
4414 expand_builtin_frame_address (tree fndecl, tree exp)
4415 {
4416 /* The argument must be a nonnegative integer constant.
4417 It counts the number of frames to scan up the stack.
4418 The value is the return address saved in that frame. */
4419 if (call_expr_nargs (exp) == 0)
4420 /* Warning about missing arg was already issued. */
4421 return const0_rtx;
4422 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4423 {
4424 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4425 error ("invalid argument to %<__builtin_frame_address%>");
4426 else
4427 error ("invalid argument to %<__builtin_return_address%>");
4428 return const0_rtx;
4429 }
4430 else
4431 {
4432 rtx tem
4433 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4434 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4435
4436 /* Some ports cannot access arbitrary stack frames. */
4437 if (tem == NULL)
4438 {
4439 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4440 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4441 else
4442 warning (0, "unsupported argument to %<__builtin_return_address%>");
4443 return const0_rtx;
4444 }
4445
4446 /* For __builtin_frame_address, return what we've got. */
4447 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4448 return tem;
4449
4450 if (!REG_P (tem)
4451 && ! CONSTANT_P (tem))
4452 tem = copy_addr_to_reg (tem);
4453 return tem;
4454 }
4455 }
4456
4457 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4458 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4459 is the same as for allocate_dynamic_stack_space. */
4460
4461 static rtx
4462 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4463 {
4464 rtx op0;
4465 rtx result;
4466 bool valid_arglist;
4467 unsigned int align;
4468 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4469 == BUILT_IN_ALLOCA_WITH_ALIGN);
4470
4471 valid_arglist
4472 = (alloca_with_align
4473 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4474 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4475
4476 if (!valid_arglist)
4477 return NULL_RTX;
4478
4479 /* Compute the argument. */
4480 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4481
4482 /* Compute the alignment. */
4483 align = (alloca_with_align
4484 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4485 : BIGGEST_ALIGNMENT);
4486
4487 /* Allocate the desired space. */
4488 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4489 result = convert_memory_address (ptr_mode, result);
4490
4491 return result;
4492 }
4493
4494 /* Expand a call to bswap builtin in EXP.
4495 Return NULL_RTX if a normal call should be emitted rather than expanding the
4496 function in-line. If convenient, the result should be placed in TARGET.
4497 SUBTARGET may be used as the target for computing one of EXP's operands. */
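/* Sketch: __builtin_bswap32 (0x12345678) evaluates to 0x78563412;
   the expansion funnels the operand through bswap_optab.  */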
4498
4499 static rtx
4500 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4501 rtx subtarget)
4502 {
4503 tree arg;
4504 rtx op0;
4505
4506 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4507 return NULL_RTX;
4508
4509 arg = CALL_EXPR_ARG (exp, 0);
4510 op0 = expand_expr (arg,
4511 subtarget && GET_MODE (subtarget) == target_mode
4512 ? subtarget : NULL_RTX,
4513 target_mode, EXPAND_NORMAL);
4514 if (GET_MODE (op0) != target_mode)
4515 op0 = convert_to_mode (target_mode, op0, 1);
4516
4517 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4518
4519 gcc_assert (target);
4520
4521 return convert_to_mode (target_mode, target, 1);
4522 }
4523
4524 /* Expand a call to a unary builtin in EXP.
4525 Return NULL_RTX if a normal call should be emitted rather than expanding the
4526 function in-line. If convenient, the result should be placed in TARGET.
4527 SUBTARGET may be used as the target for computing one of EXP's operands. */
4528
4529 static rtx
4530 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4531 rtx subtarget, optab op_optab)
4532 {
4533 rtx op0;
4534
4535 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4536 return NULL_RTX;
4537
4538 /* Compute the argument. */
4539 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4540 (subtarget
4541 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4542 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4543 VOIDmode, EXPAND_NORMAL);
4544 /* Compute op, into TARGET if possible.
4545 Set TARGET to wherever the result comes back. */
4546 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4547 op_optab, op0, target, op_optab != clrsb_optab);
4548 gcc_assert (target);
4549
4550 return convert_to_mode (target_mode, target, 0);
4551 }
4552
4553 /* Expand a call to __builtin_expect. We just return our argument,
4554 as the builtin_expect semantics should already have been applied by
4555 the tree branch prediction pass. */
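/* Sketch: by this point "__builtin_expect (x, 1)" expands simply to
   the value of "x"; the probability hint was consumed earlier.  */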
4556
4557 static rtx
4558 expand_builtin_expect (tree exp, rtx target)
4559 {
4560 tree arg;
4561
4562 if (call_expr_nargs (exp) < 2)
4563 return const0_rtx;
4564 arg = CALL_EXPR_ARG (exp, 0);
4565
4566 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4567 /* When guessing was done, the hints should be already stripped away. */
4568 gcc_assert (!flag_guess_branch_prob
4569 || optimize == 0 || seen_error ());
4570 return target;
4571 }
4572
4573 /* Expand a call to __builtin_assume_aligned. We just return our first
4574 argument, as the builtin_assume_aligned semantics should already have
4575 been applied by CCP. */
4576
4577 static rtx
4578 expand_builtin_assume_aligned (tree exp, rtx target)
4579 {
4580 if (call_expr_nargs (exp) < 2)
4581 return const0_rtx;
4582 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4583 EXPAND_NORMAL);
4584 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4585 && (call_expr_nargs (exp) < 3
4586 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4587 return target;
4588 }
4589
4590 void
4591 expand_builtin_trap (void)
4592 {
4593 #ifdef HAVE_trap
4594 if (HAVE_trap)
4595 {
4596 rtx insn = emit_insn (gen_trap ());
4597 /* For trap insns when not accumulating outgoing args force
4598 REG_ARGS_SIZE note to prevent crossjumping of calls with
4599 different args sizes. */
4600 if (!ACCUMULATE_OUTGOING_ARGS)
4601 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4602 }
4603 else
4604 #endif
4605 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4606 emit_barrier ();
4607 }
4608
4609 /* Expand a call to __builtin_unreachable. We do nothing except emit
4610 a barrier saying that control flow will not pass here.
4611
4612 It is the responsibility of the program being compiled to ensure
4613 that control flow never reaches __builtin_unreachable. */
4614 static void
4615 expand_builtin_unreachable (void)
4616 {
4617 emit_barrier ();
4618 }
4619
4620 /* Expand EXP, a call to fabs, fabsf or fabsl.
4621 Return NULL_RTX if a normal call should be emitted rather than expanding
4622 the function inline. If convenient, the result should be placed
4623 in TARGET. SUBTARGET may be used as the target for computing
4624 the operand. */
4625
4626 static rtx
4627 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4628 {
4629 enum machine_mode mode;
4630 tree arg;
4631 rtx op0;
4632
4633 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4634 return NULL_RTX;
4635
4636 arg = CALL_EXPR_ARG (exp, 0);
4637 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4638 mode = TYPE_MODE (TREE_TYPE (arg));
4639 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4640 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4641 }
4642
4643 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4644 Return NULL_RTX if a normal call should be emitted rather than expanding the
4645 function inline. If convenient, the result should be placed in TARGET.
4646 SUBTARGET may be used as the target for computing the operand. */
4647
4648 static rtx
4649 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4650 {
4651 rtx op0, op1;
4652 tree arg;
4653
4654 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4655 return NULL_RTX;
4656
4657 arg = CALL_EXPR_ARG (exp, 0);
4658 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4659
4660 arg = CALL_EXPR_ARG (exp, 1);
4661 op1 = expand_normal (arg);
4662
4663 return expand_copysign (op0, op1, target);
4664 }
4665
4666 /* Expand a call to __builtin___clear_cache. */
4667
4668 static rtx
4669 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4670 {
4671 #ifndef HAVE_clear_cache
4672 #ifdef CLEAR_INSN_CACHE
4673 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4674 does something. Just do the default expansion to a call to
4675 __clear_cache(). */
4676 return NULL_RTX;
4677 #else
4678 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4679 does nothing. There is no need to call it. Do nothing. */
4680 return const0_rtx;
4681 #endif /* CLEAR_INSN_CACHE */
4682 #else
4683 /* We have a "clear_cache" insn, and it will handle everything. */
4684 tree begin, end;
4685 rtx begin_rtx, end_rtx;
4686
4687 /* We must not expand to a library call. If we did, any
4688 fallback library function in libgcc that might contain a call to
4689 __builtin___clear_cache() would recurse infinitely. */
4690 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4691 {
4692 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4693 return const0_rtx;
4694 }
4695
4696 if (HAVE_clear_cache)
4697 {
4698 struct expand_operand ops[2];
4699
4700 begin = CALL_EXPR_ARG (exp, 0);
4701 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4702
4703 end = CALL_EXPR_ARG (exp, 1);
4704 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4705
4706 create_address_operand (&ops[0], begin_rtx);
4707 create_address_operand (&ops[1], end_rtx);
4708 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4709 return const0_rtx;
4710 }
4711 return const0_rtx;
4712 #endif /* HAVE_clear_cache */
4713 }
4714
4715 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4716
4717 static rtx
4718 round_trampoline_addr (rtx tramp)
4719 {
4720 rtx temp, addend, mask;
4721
4722 /* If we don't need too much alignment, we'll have been guaranteed
4723 proper alignment by get_trampoline_type. */
4724 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4725 return tramp;
4726
4727 /* Round address up to desired boundary. */
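  /* I.e. TRAMP = (TRAMP + A - 1) & -A, where A is the alignment in
     bytes; e.g. with A == 16, address 0x1005 rounds up to 0x1010.  */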
4728 temp = gen_reg_rtx (Pmode);
4729 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4730 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4731
4732 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4733 temp, 0, OPTAB_LIB_WIDEN);
4734 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4735 temp, 0, OPTAB_LIB_WIDEN);
4736
4737 return tramp;
4738 }
4739
4740 static rtx
4741 expand_builtin_init_trampoline (tree exp, bool onstack)
4742 {
4743 tree t_tramp, t_func, t_chain;
4744 rtx m_tramp, r_tramp, r_chain, tmp;
4745
4746 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4747 POINTER_TYPE, VOID_TYPE))
4748 return NULL_RTX;
4749
4750 t_tramp = CALL_EXPR_ARG (exp, 0);
4751 t_func = CALL_EXPR_ARG (exp, 1);
4752 t_chain = CALL_EXPR_ARG (exp, 2);
4753
4754 r_tramp = expand_normal (t_tramp);
4755 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4756 MEM_NOTRAP_P (m_tramp) = 1;
4757
4758 /* If ONSTACK, the TRAMP argument should be the address of a field
4759 within the local function's FRAME decl. Either way, let's see if
4760 we can fill in the MEM_ATTRs for this memory. */
4761 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4762 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4763
4764 /* Creator of a heap trampoline is responsible for making sure the
4765 address is aligned to at least STACK_BOUNDARY. Normally malloc
4766 will ensure this anyhow. */
4767 tmp = round_trampoline_addr (r_tramp);
4768 if (tmp != r_tramp)
4769 {
4770 m_tramp = change_address (m_tramp, BLKmode, tmp);
4771 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4772 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4773 }
4774
4775 /* The FUNC argument should be the address of the nested function.
4776 Extract the actual function decl to pass to the hook. */
4777 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4778 t_func = TREE_OPERAND (t_func, 0);
4779 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4780
4781 r_chain = expand_normal (t_chain);
4782
4783 /* Generate insns to initialize the trampoline. */
4784 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4785
4786 if (onstack)
4787 {
4788 trampolines_created = 1;
4789
4790 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4791 "trampoline generated for nested function %qD", t_func);
4792 }
4793
4794 return const0_rtx;
4795 }
4796
4797 static rtx
4798 expand_builtin_adjust_trampoline (tree exp)
4799 {
4800 rtx tramp;
4801
4802 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4803 return NULL_RTX;
4804
4805 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4806 tramp = round_trampoline_addr (tramp);
4807 if (targetm.calls.trampoline_adjust_address)
4808 tramp = targetm.calls.trampoline_adjust_address (tramp);
4809
4810 return tramp;
4811 }
4812
4813 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4814 function. The function first checks whether the back end provides
4815 an insn to implement signbit for the respective mode. If not, it
4816 checks whether the floating point format of the value is such that
4817 the sign bit can be extracted. If that is not the case, the
4818 function returns NULL_RTX to indicate that a normal call should be
4819 emitted rather than expanding the function in-line. EXP is the
4820 expression that is a call to the builtin function; if convenient,
4821 the result should be placed in TARGET. */
4822 static rtx
4823 expand_builtin_signbit (tree exp, rtx target)
4824 {
4825 const struct real_format *fmt;
4826 enum machine_mode fmode, imode, rmode;
4827 tree arg;
4828 int word, bitpos;
4829 enum insn_code icode;
4830 rtx temp;
4831 location_t loc = EXPR_LOCATION (exp);
4832
4833 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4834 return NULL_RTX;
4835
4836 arg = CALL_EXPR_ARG (exp, 0);
4837 fmode = TYPE_MODE (TREE_TYPE (arg));
4838 rmode = TYPE_MODE (TREE_TYPE (exp));
4839 fmt = REAL_MODE_FORMAT (fmode);
4840
4841 arg = builtin_save_expr (arg);
4842
4843 /* Expand the argument yielding a RTX expression. */
4844 temp = expand_normal (arg);
4845
4846 /* Check if the back end provides an insn that handles signbit for the
4847 argument's mode. */
4848 icode = optab_handler (signbit_optab, fmode);
4849 if (icode != CODE_FOR_nothing)
4850 {
4851 rtx last = get_last_insn ();
4852 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4853 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4854 return target;
4855 delete_insns_since (last);
4856 }
4857
4858 /* For floating point formats without a sign bit, implement signbit
4859 as "ARG < 0.0". */
4860 bitpos = fmt->signbit_ro;
4861 if (bitpos < 0)
4862 {
4863 /* But we can't do this if the format supports signed zero. */
4864 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4865 return NULL_RTX;
4866
4867 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4868 build_real (TREE_TYPE (arg), dconst0));
4869 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4870 }
4871
4872 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4873 {
4874 imode = int_mode_for_mode (fmode);
4875 if (imode == BLKmode)
4876 return NULL_RTX;
4877 temp = gen_lowpart (imode, temp);
4878 }
4879 else
4880 {
4881 imode = word_mode;
4882 /* Handle targets with different FP word orders. */
4883 if (FLOAT_WORDS_BIG_ENDIAN)
4884 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4885 else
4886 word = bitpos / BITS_PER_WORD;
4887 temp = operand_subword_force (temp, word, fmode);
4888 bitpos = bitpos % BITS_PER_WORD;
4889 }
4890
4891 /* Force the intermediate word_mode (or narrower) result into a
4892 register. This avoids attempting to create paradoxical SUBREGs
4893 of floating point modes below. */
4894 temp = force_reg (imode, temp);
4895
4896 /* If the bitpos is within the "result mode" lowpart, the operation
4897 can be implemented with a single bitwise AND. Otherwise, we need
4898 a right shift and an AND. */
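  /* Sketch: for IEEE single (BITPOS 31, RMODE SImode) this is just
     TEMP & 0x80000000.  For IEEE double on a 64-bit target (BITPOS 63,
     IMODE DImode, RMODE SImode) the sign bit is shifted down to bit 0
     first and then masked.  */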
4899
4900 if (bitpos < GET_MODE_BITSIZE (rmode))
4901 {
4902 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4903
4904 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4905 temp = gen_lowpart (rmode, temp);
4906 temp = expand_binop (rmode, and_optab, temp,
4907 immed_wide_int_const (mask, rmode),
4908 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4909 }
4910 else
4911 {
4912 /* Perform a logical right shift to place the signbit in the least
4913 significant bit, then truncate the result to the desired mode
4914 and mask just this bit. */
4915 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4916 temp = gen_lowpart (rmode, temp);
4917 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4918 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4919 }
4920
4921 return temp;
4922 }
4923
4924 /* Expand fork or exec calls. TARGET is the desired target of the
4925 call. EXP is the call. FN is the
4926 identifier of the actual function. IGNORE is nonzero if the
4927 value is to be ignored. */
4928
4929 static rtx
4930 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4931 {
4932 tree id, decl;
4933 tree call;
4934
4935 /* If we are not profiling, just call the function. */
4936 if (!profile_arc_flag)
4937 return NULL_RTX;
4938
4939 /* Otherwise call the wrapper. This should look equivalent to the rest
4940 of the compiler, so the generated code does not diverge, and the wrapper
4941 may run the code necessary for keeping the profiling sane. */
4942
4943 switch (DECL_FUNCTION_CODE (fn))
4944 {
4945 case BUILT_IN_FORK:
4946 id = get_identifier ("__gcov_fork");
4947 break;
4948
4949 case BUILT_IN_EXECL:
4950 id = get_identifier ("__gcov_execl");
4951 break;
4952
4953 case BUILT_IN_EXECV:
4954 id = get_identifier ("__gcov_execv");
4955 break;
4956
4957 case BUILT_IN_EXECLP:
4958 id = get_identifier ("__gcov_execlp");
4959 break;
4960
4961 case BUILT_IN_EXECLE:
4962 id = get_identifier ("__gcov_execle");
4963 break;
4964
4965 case BUILT_IN_EXECVP:
4966 id = get_identifier ("__gcov_execvp");
4967 break;
4968
4969 case BUILT_IN_EXECVE:
4970 id = get_identifier ("__gcov_execve");
4971 break;
4972
4973 default:
4974 gcc_unreachable ();
4975 }
4976
4977 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4978 FUNCTION_DECL, id, TREE_TYPE (fn));
4979 DECL_EXTERNAL (decl) = 1;
4980 TREE_PUBLIC (decl) = 1;
4981 DECL_ARTIFICIAL (decl) = 1;
4982 TREE_NOTHROW (decl) = 1;
4983 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4984 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4985 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4986 return expand_call (call, target, ignore);
4987 }
4988
4989
4990 \f
4991 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4992 the pointer in these functions is void*, the tree optimizers may remove
4993 casts. The mode computed in expand_builtin isn't reliable either, due
4994 to __sync_bool_compare_and_swap.
4995
4996 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4997 group of builtins. This gives us log2 of the mode size. */
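/* Sketch: for __sync_fetch_and_add_4, FCODE_DIFF is 2, so the mode
   requested is BITS_PER_UNIT << 2 == 32 bits wide, i.e. SImode on
   typical targets.  */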
4998
4999 static inline enum machine_mode
5000 get_builtin_sync_mode (int fcode_diff)
5001 {
5002 /* The size is not negotiable, so ask not to get BLKmode in return
5003 if the target indicates that a smaller size would be better. */
5004 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5005 }
5006
5007 /* Expand the memory expression LOC and return the appropriate memory operand
5008 for the builtin_sync operations. */
5009
5010 static rtx
5011 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5012 {
5013 rtx addr, mem;
5014
5015 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5016 addr = convert_memory_address (Pmode, addr);
5017
5018 /* Note that we explicitly do not want any alias information for this
5019 memory, so that we kill all other live memories. Otherwise we don't
5020 satisfy the full barrier semantics of the intrinsic. */
5021 mem = validize_mem (gen_rtx_MEM (mode, addr));
5022
5023 /* The alignment needs to be at least that of the mode. */
5024 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5025 get_pointer_alignment (loc)));
5026 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5027 MEM_VOLATILE_P (mem) = 1;
5028
5029 return mem;
5030 }
5031
5032 /* Make sure an argument is in the right mode.
5033 EXP is the tree argument.
5034 MODE is the mode it should be in. */
5035
5036 static rtx
5037 expand_expr_force_mode (tree exp, enum machine_mode mode)
5038 {
5039 rtx val;
5040 enum machine_mode old_mode;
5041
5042 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5043 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5044 of CONST_INTs, where we know the old_mode only from the call argument. */
5045
5046 old_mode = GET_MODE (val);
5047 if (old_mode == VOIDmode)
5048 old_mode = TYPE_MODE (TREE_TYPE (exp));
5049 val = convert_modes (mode, old_mode, val, 1);
5050 return val;
5051 }
5052
5053
5054 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5055 EXP is the CALL_EXPR. CODE is the rtx code
5056 that corresponds to the arithmetic or logical operation from the name;
5057 an exception here is that NOT actually means NAND. TARGET is an optional
5058 place for us to store the results; AFTER is true if this is the
5059 fetch_and_xxx form. */
5060
5061 static rtx
5062 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5063 enum rtx_code code, bool after,
5064 rtx target)
5065 {
5066 rtx val, mem;
5067 location_t loc = EXPR_LOCATION (exp);
5068
5069 if (code == NOT && warn_sync_nand)
5070 {
5071 tree fndecl = get_callee_fndecl (exp);
5072 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5073
5074 static bool warned_f_a_n, warned_n_a_f;
5075
5076 switch (fcode)
5077 {
5078 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5079 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5080 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5081 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5082 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5083 if (warned_f_a_n)
5084 break;
5085
5086 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5087 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5088 warned_f_a_n = true;
5089 break;
5090
5091 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5092 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5093 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5094 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5095 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5096 if (warned_n_a_f)
5097 break;
5098
5099 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5100 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5101 warned_n_a_f = true;
5102 break;
5103
5104 default:
5105 gcc_unreachable ();
5106 }
5107 }
5108
5109 /* Expand the operands. */
5110 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5111 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5112
5113 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5114 after);
5115 }
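/* A sketch of the semantic change the warning above refers to: since
   GCC 4.4 the NAND builtins compute

     tmp = *ptr; *ptr = ~(tmp & val); return tmp;   // __sync_fetch_and_nand
     tmp = ~(*ptr & val); *ptr = tmp; return tmp;   // __sync_nand_and_fetch

   whereas releases before 4.4 computed *ptr = ~*ptr & val.  */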
5116
5117 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5118 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5119 true if this is the boolean form. TARGET is a place for us to store the
5120 results; this is NOT optional if IS_BOOL is true. */
5121
5122 static rtx
5123 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5124 bool is_bool, rtx target)
5125 {
5126 rtx old_val, new_val, mem;
5127 rtx *pbool, *poval;
5128
5129 /* Expand the operands. */
5130 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5131 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5132 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5133
5134 pbool = poval = NULL;
5135 if (target != const0_rtx)
5136 {
5137 if (is_bool)
5138 pbool = &target;
5139 else
5140 poval = &target;
5141 }
5142 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5143 false, MEMMODEL_SEQ_CST,
5144 MEMMODEL_SEQ_CST))
5145 return NULL_RTX;
5146
5147 return target;
5148 }
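/* For reference, the two source-level forms handled here:

     bool __sync_bool_compare_and_swap (T *ptr, T oldval, T newval);
       // nonzero iff *ptr equalled OLDVAL and the swap was performed
     T __sync_val_compare_and_swap (T *ptr, T oldval, T newval);
       // returns the contents of *ptr before the operation  */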
5149
5150 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5151 general form is actually an atomic exchange, and some targets only
5152 support a reduced form with the second argument being a constant 1.
5153 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5154 the results. */
5155
5156 static rtx
5157 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5158 rtx target)
5159 {
5160 rtx val, mem;
5161
5162 /* Expand the operands. */
5163 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5164 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5165
5166 return expand_sync_lock_test_and_set (target, mem, val);
5167 }
5168
5169 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5170
5171 static void
5172 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5173 {
5174 rtx mem;
5175
5176 /* Expand the operands. */
5177 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5178
5179 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5180 }
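/* Taken together, the two lock primitives above are enough for a minimal
   spinlock; a sketch of the intended usage:

     static volatile int lock;
     while (__sync_lock_test_and_set (&lock, 1))   // acquire barrier
       ;                                           // spin while it was 1
     ... critical section ...
     __sync_lock_release (&lock);                  // release barrier, stores 0
*/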
5181
5182 /* Given an integer representing an ``enum memmodel'', verify its
5183 correctness and return the memory model enum. */
5184
5185 static enum memmodel
5186 get_memmodel (tree exp)
5187 {
5188 rtx op;
5189 unsigned HOST_WIDE_INT val;
5190
5191 /* If the parameter is not a constant, it's a run time value so we'll just
5192 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5193 if (TREE_CODE (exp) != INTEGER_CST)
5194 return MEMMODEL_SEQ_CST;
5195
5196 op = expand_normal (exp);
5197
5198 val = INTVAL (op);
5199 if (targetm.memmodel_check)
5200 val = targetm.memmodel_check (val);
5201 else if (val & ~MEMMODEL_MASK)
5202 {
5203 warning (OPT_Winvalid_memory_model,
5204 "Unknown architecture specifier in memory model to builtin.");
5205 return MEMMODEL_SEQ_CST;
5206 }
5207
5208 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5209 {
5210 warning (OPT_Winvalid_memory_model,
5211 "invalid memory model argument to builtin");
5212 return MEMMODEL_SEQ_CST;
5213 }
5214
5215 return (enum memmodel) val;
5216 }
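/* For reference, the source-level constants decoded here (the low bits of
   the argument, before any target-specific flags checked above):

     __ATOMIC_RELAXED 0    __ATOMIC_CONSUME 1    __ATOMIC_ACQUIRE 2
     __ATOMIC_RELEASE 3    __ATOMIC_ACQ_REL 4    __ATOMIC_SEQ_CST 5  */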
5217
5218 /* Expand the __atomic_exchange intrinsic:
5219 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5220 EXP is the CALL_EXPR.
5221 TARGET is an optional place for us to store the results. */
5222
5223 static rtx
5224 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5225 {
5226 rtx val, mem;
5227 enum memmodel model;
5228
5229 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5230 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5231 {
5232 error ("invalid memory model for %<__atomic_exchange%>");
5233 return NULL_RTX;
5234 }
5235
5236 if (!flag_inline_atomics)
5237 return NULL_RTX;
5238
5239 /* Expand the operands. */
5240 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5241 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5242
5243 return expand_atomic_exchange (target, mem, val, model);
5244 }
5245
5246 /* Expand the __atomic_compare_exchange intrinsic:
5247 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5248 TYPE desired, BOOL weak,
5249 enum memmodel success,
5250 enum memmodel failure)
5251 EXP is the CALL_EXPR.
5252 TARGET is an optional place for us to store the results. */
5253
5254 static rtx
5255 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5256 rtx target)
5257 {
5258 rtx expect, desired, mem, oldval, label;
5259 enum memmodel success, failure;
5260 tree weak;
5261 bool is_weak;
5262
5263 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5264 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5265
5266 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5267 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5268 {
5269 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5270 return NULL_RTX;
5271 }
5272
5273 if (failure > success)
5274 {
5275 error ("failure memory model cannot be stronger than success "
5276 "memory model for %<__atomic_compare_exchange%>");
5277 return NULL_RTX;
5278 }
5279
5280 if (!flag_inline_atomics)
5281 return NULL_RTX;
5282
5283 /* Expand the operands. */
5284 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5285
5286 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5287 expect = convert_memory_address (Pmode, expect);
5288 expect = gen_rtx_MEM (mode, expect);
5289 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5290
5291 weak = CALL_EXPR_ARG (exp, 3);
5292 is_weak = false;
5293 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5294 is_weak = true;
5295
5296 if (target == const0_rtx)
5297 target = NULL;
5298
5299 /* Lest the rtl backend create a race condition with an improper store
5300 to memory, always create a new pseudo for OLDVAL. */
5301 oldval = NULL;
5302
5303 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5304 is_weak, success, failure))
5305 return NULL_RTX;
5306
5307 /* Conditionally store back to EXPECT, lest we create a race condition
5308 with an improper store to memory. */
5309 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5310 the normal case where EXPECT is totally private, i.e. a register. At
5311 which point the store can be unconditional. */
5312 label = gen_label_rtx ();
5313 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5314 emit_move_insn (expect, oldval);
5315 emit_label (label);
5316
5317 return target;
5318 }
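/* A sketch of the source-level pattern being expanded, using the
   type-generic _n form (names here are illustrative):

     T expected = old;
     bool ok = __atomic_compare_exchange_n (ptr, &expected, desired,
                                            false,             // weak
                                            __ATOMIC_SEQ_CST,  // success
                                            __ATOMIC_ACQUIRE); // failure

   On failure, EXPECTED is updated with the current contents of *PTR;
   that update is exactly the conditional store-back emitted above.  */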
5319
5320 /* Expand the __atomic_load intrinsic:
5321 TYPE __atomic_load (TYPE *object, enum memmodel)
5322 EXP is the CALL_EXPR.
5323 TARGET is an optional place for us to store the results. */
5324
5325 static rtx
5326 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5327 {
5328 rtx mem;
5329 enum memmodel model;
5330
5331 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5332 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5333 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5334 {
5335 error ("invalid memory model for %<__atomic_load%>");
5336 return NULL_RTX;
5337 }
5338
5339 if (!flag_inline_atomics)
5340 return NULL_RTX;
5341
5342 /* Expand the operand. */
5343 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5344
5345 return expand_atomic_load (target, mem, model);
5346 }
5347
5348
5349 /* Expand the __atomic_store intrinsic:
5350 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5351 EXP is the CALL_EXPR.
5352 TARGET is an optional place for us to store the results. */
5353
5354 static rtx
5355 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5356 {
5357 rtx mem, val;
5358 enum memmodel model;
5359
5360 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5361 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5362 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5363 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5364 {
5365 error ("invalid memory model for %<__atomic_store%>");
5366 return NULL_RTX;
5367 }
5368
5369 if (!flag_inline_atomics)
5370 return NULL_RTX;
5371
5372 /* Expand the operands. */
5373 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5374 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5375
5376 return expand_atomic_store (mem, val, model, false);
5377 }
5378
5379 /* Expand the __atomic_fetch_XXX intrinsic:
5380 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5381 EXP is the CALL_EXPR.
5382 TARGET is an optional place for us to store the results.
5383 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT again means NAND).
5384 FETCH_AFTER is true if returning the result of the operation.
5385 FETCH_AFTER is false if returning the value before the operation.
5386 IGNORE is true if the result is not used.
5387 EXT_CALL is the correct builtin for an external call if this cannot be
5388 resolved to an instruction sequence. */
5389
5390 static rtx
5391 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5392 enum rtx_code code, bool fetch_after,
5393 bool ignore, enum built_in_function ext_call)
5394 {
5395 rtx val, mem, ret;
5396 enum memmodel model;
5397 tree fndecl;
5398 tree addr;
5399
5400 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5401
5402 /* Expand the operands. */
5403 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5404 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5405
5406 /* Only try generating instructions if inlining is turned on. */
5407 if (flag_inline_atomics)
5408 {
5409 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5410 if (ret)
5411 return ret;
5412 }
5413
5414 /* If there is no external fallback routine, leave the library call to the caller. */
5415 if (ext_call == BUILT_IN_NONE)
5416 return NULL_RTX;
5417
5418 /* Change the call to the specified function. */
5419 fndecl = get_callee_fndecl (exp);
5420 addr = CALL_EXPR_FN (exp);
5421 STRIP_NOPS (addr);
5422
5423 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5424 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5425
5426 /* Expand the call here so we can emit trailing code. */
5427 ret = expand_call (exp, target, ignore);
5428
5429 /* Replace the original function just in case it matters. */
5430 TREE_OPERAND (addr, 0) = fndecl;
5431
5432 /* Then issue the arithmetic correction to return the right result. */
5433 if (!ignore)
5434 {
5435 if (code == NOT)
5436 {
5437 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5438 OPTAB_LIB_WIDEN);
5439 ret = expand_simple_unop (mode, NOT, ret, target, true);
5440 }
5441 else
5442 ret = expand_simple_binop (mode, code, ret, val, target, true,
5443 OPTAB_LIB_WIDEN);
5444 }
5445 return ret;
5446 }
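/* A sketch of the trailing correction, in source terms: the library
   routines return the old value, so e.g. an __atomic_add_fetch that fell
   back to the library is finished off as

     ret = __atomic_fetch_add (ptr, val, model);   // old value
     ret = ret + val;                              // value after the add

   and the NAND case computes ret = ~(ret & val) instead.  */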
5447
5448
5449 #ifndef HAVE_atomic_clear
5450 # define HAVE_atomic_clear 0
5451 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5452 #endif
5453
5454 /* Expand an atomic clear operation.
5455 void __atomic_clear (BOOL *obj, enum memmodel)
5456 EXP is the call expression. */
5457
5458 static rtx
5459 expand_builtin_atomic_clear (tree exp)
5460 {
5461 enum machine_mode mode;
5462 rtx mem, ret;
5463 enum memmodel model;
5464
5465 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5466 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5467 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5468
5469 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5470 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5471 {
5472 error ("invalid memory model for %<__atomic_store%>");
5473 return const0_rtx;
5474 }
5475
5476 if (HAVE_atomic_clear)
5477 {
5478 emit_insn (gen_atomic_clear (mem, model));
5479 return const0_rtx;
5480 }
5481
5482 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release
5483 pattern; if neither works, issue a plain store below. The only way this can
5484 fail is if the bool type is larger than a word size. Unlikely, but
5485 handle it anyway for completeness. Assume a single threaded model since
5486 there is no atomic support in this case, and no barriers are required. */
5487 ret = expand_atomic_store (mem, const0_rtx, model, true);
5488 if (!ret)
5489 emit_move_insn (mem, const0_rtx);
5490 return const0_rtx;
5491 }
5492
5493 /* Expand an atomic test_and_set operation.
5494 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5495 EXP is the call expression. */
5496
5497 static rtx
5498 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5499 {
5500 rtx mem;
5501 enum memmodel model;
5502 enum machine_mode mode;
5503
5504 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5506 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5507
5508 return expand_atomic_test_and_set (target, mem, model);
5509 }
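/* These two expanders back the C11 atomic_flag operations; a sketch of
   the usual pattern:

     static volatile char flag;
     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                 // spin while it was already set
     ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/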
5510
5511
5512 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5513 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5514
5515 static tree
5516 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5517 {
5518 int size;
5519 enum machine_mode mode;
5520 unsigned int mode_align, type_align;
5521
5522 if (TREE_CODE (arg0) != INTEGER_CST)
5523 return NULL_TREE;
5524
5525 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5526 mode = mode_for_size (size, MODE_INT, 0);
5527 mode_align = GET_MODE_ALIGNMENT (mode);
5528
5529 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5530 type_align = mode_align;
5531 else
5532 {
5533 tree ttype = TREE_TYPE (arg1);
5534
5535 /* This function is usually invoked and folded immediately by the front
5536 end before anything else has a chance to look at it. The pointer
5537 parameter at this point is usually cast to a void *, so check for that
5538 and look past the cast. */
5539 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5540 && VOID_TYPE_P (TREE_TYPE (ttype)))
5541 arg1 = TREE_OPERAND (arg1, 0);
5542
5543 ttype = TREE_TYPE (arg1);
5544 gcc_assert (POINTER_TYPE_P (ttype));
5545
5546 /* Get the underlying type of the object. */
5547 ttype = TREE_TYPE (ttype);
5548 type_align = TYPE_ALIGN (ttype);
5549 }
5550
5551 /* If the object has smaller alignment, the lock free routines cannot
5552 be used. */
5553 if (type_align < mode_align)
5554 return boolean_false_node;
5555
5556 /* Check if a compare_and_swap pattern exists for the mode which represents
5557 the required size. The pattern is not allowed to fail, so the existence
5558 of the pattern indicates support is present. */
5559 if (can_compare_and_swap_p (mode, true))
5560 return boolean_true_node;
5561 else
5562 return boolean_false_node;
5563 }
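/* For illustration, both of the following typically fold to a constant
   right here when invoked by the front end:

     __atomic_always_lock_free (sizeof (int), 0);   // typical int alignment
     __atomic_always_lock_free (sizeof (int), &x);  // alignment of x's type
*/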
5564
5565 /* Return true if the parameters to call EXP represent an object which will
5566 always generate lock free instructions. The first argument represents the
5567 size of the object, and the second parameter is a pointer to the object
5568 itself. If NULL is passed for the object, then the result is based on
5569 typical alignment for an object of the specified size. Return const1_rtx
5570 if always lock free, const0_rtx otherwise. */
5571
5572 static rtx
5573 expand_builtin_atomic_always_lock_free (tree exp)
5574 {
5575 tree size;
5576 tree arg0 = CALL_EXPR_ARG (exp, 0);
5577 tree arg1 = CALL_EXPR_ARG (exp, 1);
5578
5579 if (TREE_CODE (arg0) != INTEGER_CST)
5580 {
5581 error ("non-constant argument 1 to __atomic_always_lock_free");
5582 return const0_rtx;
5583 }
5584
5585 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5586 if (size == boolean_true_node)
5587 return const1_rtx;
5588 return const0_rtx;
5589 }
5590
5591 /* Return boolean_true_node if it can be determined that object ARG1 of size
5592 ARG0 is always lock free on this architecture, NULL_TREE otherwise. */
5593
5594 static tree
5595 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5596 {
5597 if (!flag_inline_atomics)
5598 return NULL_TREE;
5599
5600 /* If it isn't always lock free, don't generate a result. */
5601 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5602 return boolean_true_node;
5603
5604 return NULL_TREE;
5605 }
5606
5607 /* Expand __atomic_is_lock_free: return const1_rtx if it can be determined
5608 at compile time that the object is lock free. The first argument is the
5609 size of the object, and the second is a pointer to the object itself; if
5610 NULL is passed for the object, the result is based on typical alignment
5611 for an object of the specified size. Otherwise return
5612 NULL_RTX. */
5613
5614 static rtx
5615 expand_builtin_atomic_is_lock_free (tree exp)
5616 {
5617 tree size;
5618 tree arg0 = CALL_EXPR_ARG (exp, 0);
5619 tree arg1 = CALL_EXPR_ARG (exp, 1);
5620
5621 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5622 {
5623 error ("non-integer argument 1 to __atomic_is_lock_free");
5624 return NULL_RTX;
5625 }
5626
5627 if (!flag_inline_atomics)
5628 return NULL_RTX;
5629
5630 /* If the value is known at compile time, return the RTX for it. */
5631 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5632 if (size == boolean_true_node)
5633 return const1_rtx;
5634
5635 return NULL_RTX;
5636 }
5637
5638 /* Expand the __atomic_thread_fence intrinsic:
5639 void __atomic_thread_fence (enum memmodel)
5640 EXP is the CALL_EXPR. */
5641
5642 static void
5643 expand_builtin_atomic_thread_fence (tree exp)
5644 {
5645 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5646 expand_mem_thread_fence (model);
5647 }
5648
5649 /* Expand the __atomic_signal_fence intrinsic:
5650 void __atomic_signal_fence (enum memmodel)
5651 EXP is the CALL_EXPR. */
5652
5653 static void
5654 expand_builtin_atomic_signal_fence (tree exp)
5655 {
5656 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5657 expand_mem_signal_fence (model);
5658 }
5659
5660 /* Expand the __sync_synchronize intrinsic. */
5661
5662 static void
5663 expand_builtin_sync_synchronize (void)
5664 {
5665 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5666 }
5667
5668 static rtx
5669 expand_builtin_thread_pointer (tree exp, rtx target)
5670 {
5671 enum insn_code icode;
5672 if (!validate_arglist (exp, VOID_TYPE))
5673 return const0_rtx;
5674 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5675 if (icode != CODE_FOR_nothing)
5676 {
5677 struct expand_operand op;
5678 /* If the target is not suitable, create a new target. */
5679 if (target == NULL_RTX
5680 || !REG_P (target)
5681 || GET_MODE (target) != Pmode)
5682 target = gen_reg_rtx (Pmode);
5683 create_output_operand (&op, target, Pmode);
5684 expand_insn (icode, 1, &op);
5685 return target;
5686 }
5687 error ("__builtin_thread_pointer is not supported on this target");
5688 return const0_rtx;
5689 }
5690
5691 static void
5692 expand_builtin_set_thread_pointer (tree exp)
5693 {
5694 enum insn_code icode;
5695 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5696 return;
5697 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5698 if (icode != CODE_FOR_nothing)
5699 {
5700 struct expand_operand op;
5701 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5702 Pmode, EXPAND_NORMAL);
5703 create_input_operand (&op, val, Pmode);
5704 expand_insn (icode, 1, &op);
5705 return;
5706 }
5707 error ("__builtin_set_thread_pointer is not supported on this target");
5708 }
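/* On targets that expose a thread-pointer register these two expand to a
   single register move; a sketch of the user-level interface:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);
*/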
5709
5710 \f
5711 /* Emit code to restore the current value of the stack pointer. */
5712
5713 static void
5714 expand_stack_restore (tree var)
5715 {
5716 rtx prev, sa = expand_normal (var);
5717
5718 sa = convert_memory_address (Pmode, sa);
5719
5720 prev = get_last_insn ();
5721 emit_stack_restore (SAVE_BLOCK, sa);
5722 fixup_args_size_notes (prev, get_last_insn (), 0);
5723 }
5724
5725
5726 /* Emit code to save the current value of the stack pointer. */
5727
5728 static rtx
5729 expand_stack_save (void)
5730 {
5731 rtx ret = NULL_RTX;
5732
5733 do_pending_stack_adjust ();
5734 emit_stack_save (SAVE_BLOCK, &ret);
5735 return ret;
5736 }
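/* These two expanders back the __builtin_stack_save/__builtin_stack_restore
   pair that the gimplifier wraps around variable-sized allocations,
   conceptually:

     p = __builtin_stack_save ();
     ... use the alloca or VLA storage ...
     __builtin_stack_restore (p);
*/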
5737
5738 /* Expand an expression EXP that calls a built-in function,
5739 with result going to TARGET if that's convenient
5740 (and in mode MODE if that's convenient).
5741 SUBTARGET may be used as the target for computing one of EXP's operands.
5742 IGNORE is nonzero if the value is to be ignored. */
5743
5744 rtx
5745 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5746 int ignore)
5747 {
5748 tree fndecl = get_callee_fndecl (exp);
5749 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5750 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5751 int flags;
5752
5753 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5754 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5755
5756 /* When not optimizing, generate calls to library functions for a certain
5757 set of builtins. */
5758 if (!optimize
5759 && !called_as_built_in (fndecl)
5760 && fcode != BUILT_IN_FORK
5761 && fcode != BUILT_IN_EXECL
5762 && fcode != BUILT_IN_EXECV
5763 && fcode != BUILT_IN_EXECLP
5764 && fcode != BUILT_IN_EXECLE
5765 && fcode != BUILT_IN_EXECVP
5766 && fcode != BUILT_IN_EXECVE
5767 && fcode != BUILT_IN_ALLOCA
5768 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5769 && fcode != BUILT_IN_FREE)
5770 return expand_call (exp, target, ignore);
5771
5772 /* The built-in function expanders test for target == const0_rtx
5773 to determine whether the function's result will be ignored. */
5774 if (ignore)
5775 target = const0_rtx;
5776
5777 /* If the result of a pure or const built-in function is ignored, and
5778 none of its arguments are volatile, we can avoid expanding the
5779 built-in call and just evaluate the arguments for side-effects. */
5780 if (target == const0_rtx
5781 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5782 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5783 {
5784 bool volatilep = false;
5785 tree arg;
5786 call_expr_arg_iterator iter;
5787
5788 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5789 if (TREE_THIS_VOLATILE (arg))
5790 {
5791 volatilep = true;
5792 break;
5793 }
5794
5795 if (! volatilep)
5796 {
5797 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5798 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5799 return const0_rtx;
5800 }
5801 }
5802
5803 switch (fcode)
5804 {
5805 CASE_FLT_FN (BUILT_IN_FABS):
5806 case BUILT_IN_FABSD32:
5807 case BUILT_IN_FABSD64:
5808 case BUILT_IN_FABSD128:
5809 target = expand_builtin_fabs (exp, target, subtarget);
5810 if (target)
5811 return target;
5812 break;
5813
5814 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5815 target = expand_builtin_copysign (exp, target, subtarget);
5816 if (target)
5817 return target;
5818 break;
5819
5820 /* Just do a normal library call if we were unable to fold
5821 the values. */
5822 CASE_FLT_FN (BUILT_IN_CABS):
5823 break;
5824
5825 CASE_FLT_FN (BUILT_IN_EXP):
5826 CASE_FLT_FN (BUILT_IN_EXP10):
5827 CASE_FLT_FN (BUILT_IN_POW10):
5828 CASE_FLT_FN (BUILT_IN_EXP2):
5829 CASE_FLT_FN (BUILT_IN_EXPM1):
5830 CASE_FLT_FN (BUILT_IN_LOGB):
5831 CASE_FLT_FN (BUILT_IN_LOG):
5832 CASE_FLT_FN (BUILT_IN_LOG10):
5833 CASE_FLT_FN (BUILT_IN_LOG2):
5834 CASE_FLT_FN (BUILT_IN_LOG1P):
5835 CASE_FLT_FN (BUILT_IN_TAN):
5836 CASE_FLT_FN (BUILT_IN_ASIN):
5837 CASE_FLT_FN (BUILT_IN_ACOS):
5838 CASE_FLT_FN (BUILT_IN_ATAN):
5839 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5840 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5841 because of possible accuracy problems. */
5842 if (! flag_unsafe_math_optimizations)
5843 break;
5844 CASE_FLT_FN (BUILT_IN_SQRT):
5845 CASE_FLT_FN (BUILT_IN_FLOOR):
5846 CASE_FLT_FN (BUILT_IN_CEIL):
5847 CASE_FLT_FN (BUILT_IN_TRUNC):
5848 CASE_FLT_FN (BUILT_IN_ROUND):
5849 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5850 CASE_FLT_FN (BUILT_IN_RINT):
5851 target = expand_builtin_mathfn (exp, target, subtarget);
5852 if (target)
5853 return target;
5854 break;
5855
5856 CASE_FLT_FN (BUILT_IN_FMA):
5857 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5858 if (target)
5859 return target;
5860 break;
5861
5862 CASE_FLT_FN (BUILT_IN_ILOGB):
5863 if (! flag_unsafe_math_optimizations)
5864 break;
5865 CASE_FLT_FN (BUILT_IN_ISINF):
5866 CASE_FLT_FN (BUILT_IN_FINITE):
5867 case BUILT_IN_ISFINITE:
5868 case BUILT_IN_ISNORMAL:
5869 target = expand_builtin_interclass_mathfn (exp, target);
5870 if (target)
5871 return target;
5872 break;
5873
5874 CASE_FLT_FN (BUILT_IN_ICEIL):
5875 CASE_FLT_FN (BUILT_IN_LCEIL):
5876 CASE_FLT_FN (BUILT_IN_LLCEIL):
5877 CASE_FLT_FN (BUILT_IN_LFLOOR):
5878 CASE_FLT_FN (BUILT_IN_IFLOOR):
5879 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5880 target = expand_builtin_int_roundingfn (exp, target);
5881 if (target)
5882 return target;
5883 break;
5884
5885 CASE_FLT_FN (BUILT_IN_IRINT):
5886 CASE_FLT_FN (BUILT_IN_LRINT):
5887 CASE_FLT_FN (BUILT_IN_LLRINT):
5888 CASE_FLT_FN (BUILT_IN_IROUND):
5889 CASE_FLT_FN (BUILT_IN_LROUND):
5890 CASE_FLT_FN (BUILT_IN_LLROUND):
5891 target = expand_builtin_int_roundingfn_2 (exp, target);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_POWI):
5897 target = expand_builtin_powi (exp, target);
5898 if (target)
5899 return target;
5900 break;
5901
5902 CASE_FLT_FN (BUILT_IN_ATAN2):
5903 CASE_FLT_FN (BUILT_IN_LDEXP):
5904 CASE_FLT_FN (BUILT_IN_SCALB):
5905 CASE_FLT_FN (BUILT_IN_SCALBN):
5906 CASE_FLT_FN (BUILT_IN_SCALBLN):
5907 if (! flag_unsafe_math_optimizations)
5908 break;
5909
5910 CASE_FLT_FN (BUILT_IN_FMOD):
5911 CASE_FLT_FN (BUILT_IN_REMAINDER):
5912 CASE_FLT_FN (BUILT_IN_DREM):
5913 CASE_FLT_FN (BUILT_IN_POW):
5914 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5915 if (target)
5916 return target;
5917 break;
5918
5919 CASE_FLT_FN (BUILT_IN_CEXPI):
5920 target = expand_builtin_cexpi (exp, target);
5921 gcc_assert (target);
5922 return target;
5923
5924 CASE_FLT_FN (BUILT_IN_SIN):
5925 CASE_FLT_FN (BUILT_IN_COS):
5926 if (! flag_unsafe_math_optimizations)
5927 break;
5928 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_SINCOS):
5934 if (! flag_unsafe_math_optimizations)
5935 break;
5936 target = expand_builtin_sincos (exp);
5937 if (target)
5938 return target;
5939 break;
5940
5941 case BUILT_IN_APPLY_ARGS:
5942 return expand_builtin_apply_args ();
5943
5944 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5945 FUNCTION with a copy of the parameters described by
5946 ARGUMENTS, and ARGSIZE. It returns a block of memory
5947 allocated on the stack into which is stored all the registers
5948 that might possibly be used for returning the result of a
5949 function. ARGUMENTS is the value returned by
5950 __builtin_apply_args. ARGSIZE is the number of bytes of
5951 arguments that must be copied. ??? How should this value be
5952 computed? We'll also need a safe worst case value for varargs
5953 functions. */
5954 case BUILT_IN_APPLY:
5955 if (!validate_arglist (exp, POINTER_TYPE,
5956 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5957 && !validate_arglist (exp, REFERENCE_TYPE,
5958 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5959 return const0_rtx;
5960 else
5961 {
5962 rtx ops[3];
5963
5964 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5965 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5966 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5967
5968 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5969 }
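      /* For illustration, the call-forwarding idiom these builtins enable
	 (TARGET_FN is illustrative; SIZE is a worst-case guess supplied by
	 the user):

	   void *args = __builtin_apply_args ();
	   void *res = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
	   __builtin_return (res);
      */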
5970
5971 /* __builtin_return (RESULT) causes the function to return the
5972 value described by RESULT. RESULT is address of the block of
5973 memory returned by __builtin_apply. */
5974 case BUILT_IN_RETURN:
5975 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5976 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5977 return const0_rtx;
5978
5979 case BUILT_IN_SAVEREGS:
5980 return expand_builtin_saveregs ();
5981
5982 case BUILT_IN_VA_ARG_PACK:
5983 /* All valid uses of __builtin_va_arg_pack () are removed during
5984 inlining. */
5985 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5986 return const0_rtx;
5987
5988 case BUILT_IN_VA_ARG_PACK_LEN:
5989 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5990 inlining. */
5991 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5992 return const0_rtx;
5993
5994 /* Return the address of the first anonymous stack arg. */
5995 case BUILT_IN_NEXT_ARG:
5996 if (fold_builtin_next_arg (exp, false))
5997 return const0_rtx;
5998 return expand_builtin_next_arg ();
5999
6000 case BUILT_IN_CLEAR_CACHE:
6001 target = expand_builtin___clear_cache (exp);
6002 if (target)
6003 return target;
6004 break;
6005
6006 case BUILT_IN_CLASSIFY_TYPE:
6007 return expand_builtin_classify_type (exp);
6008
6009 case BUILT_IN_CONSTANT_P:
6010 return const0_rtx;
6011
6012 case BUILT_IN_FRAME_ADDRESS:
6013 case BUILT_IN_RETURN_ADDRESS:
6014 return expand_builtin_frame_address (fndecl, exp);
6015
6016 /* Returns the address of the area where the structure is returned.
6017 0 otherwise. */
6018 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6019 if (call_expr_nargs (exp) != 0
6020 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6021 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6022 return const0_rtx;
6023 else
6024 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6025
6026 case BUILT_IN_ALLOCA:
6027 case BUILT_IN_ALLOCA_WITH_ALIGN:
6028 /* If the allocation stems from the declaration of a variable-sized
6029 object, it cannot accumulate. */
6030 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6031 if (target)
6032 return target;
6033 break;
6034
6035 case BUILT_IN_STACK_SAVE:
6036 return expand_stack_save ();
6037
6038 case BUILT_IN_STACK_RESTORE:
6039 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6040 return const0_rtx;
6041
6042 case BUILT_IN_BSWAP16:
6043 case BUILT_IN_BSWAP32:
6044 case BUILT_IN_BSWAP64:
6045 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6046 if (target)
6047 return target;
6048 break;
6049
6050 CASE_INT_FN (BUILT_IN_FFS):
6051 target = expand_builtin_unop (target_mode, exp, target,
6052 subtarget, ffs_optab);
6053 if (target)
6054 return target;
6055 break;
6056
6057 CASE_INT_FN (BUILT_IN_CLZ):
6058 target = expand_builtin_unop (target_mode, exp, target,
6059 subtarget, clz_optab);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_INT_FN (BUILT_IN_CTZ):
6065 target = expand_builtin_unop (target_mode, exp, target,
6066 subtarget, ctz_optab);
6067 if (target)
6068 return target;
6069 break;
6070
6071 CASE_INT_FN (BUILT_IN_CLRSB):
6072 target = expand_builtin_unop (target_mode, exp, target,
6073 subtarget, clrsb_optab);
6074 if (target)
6075 return target;
6076 break;
6077
6078 CASE_INT_FN (BUILT_IN_POPCOUNT):
6079 target = expand_builtin_unop (target_mode, exp, target,
6080 subtarget, popcount_optab);
6081 if (target)
6082 return target;
6083 break;
6084
6085 CASE_INT_FN (BUILT_IN_PARITY):
6086 target = expand_builtin_unop (target_mode, exp, target,
6087 subtarget, parity_optab);
6088 if (target)
6089 return target;
6090 break;
6091
6092 case BUILT_IN_STRLEN:
6093 target = expand_builtin_strlen (exp, target, target_mode);
6094 if (target)
6095 return target;
6096 break;
6097
6098 case BUILT_IN_STRCPY:
6099 target = expand_builtin_strcpy (exp, target);
6100 if (target)
6101 return target;
6102 break;
6103
6104 case BUILT_IN_STRNCPY:
6105 target = expand_builtin_strncpy (exp, target);
6106 if (target)
6107 return target;
6108 break;
6109
6110 case BUILT_IN_STPCPY:
6111 target = expand_builtin_stpcpy (exp, target, mode);
6112 if (target)
6113 return target;
6114 break;
6115
6116 case BUILT_IN_MEMCPY:
6117 target = expand_builtin_memcpy (exp, target);
6118 if (target)
6119 return target;
6120 break;
6121
6122 case BUILT_IN_MEMPCPY:
6123 target = expand_builtin_mempcpy (exp, target, mode);
6124 if (target)
6125 return target;
6126 break;
6127
6128 case BUILT_IN_MEMSET:
6129 target = expand_builtin_memset (exp, target, mode);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_BZERO:
6135 target = expand_builtin_bzero (exp);
6136 if (target)
6137 return target;
6138 break;
6139
6140 case BUILT_IN_STRCMP:
6141 target = expand_builtin_strcmp (exp, target);
6142 if (target)
6143 return target;
6144 break;
6145
6146 case BUILT_IN_STRNCMP:
6147 target = expand_builtin_strncmp (exp, target, mode);
6148 if (target)
6149 return target;
6150 break;
6151
6152 case BUILT_IN_BCMP:
6153 case BUILT_IN_MEMCMP:
6154 target = expand_builtin_memcmp (exp, target, mode);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_SETJMP:
6160 /* This should have been lowered to the builtins below. */
6161 gcc_unreachable ();
6162
6163 case BUILT_IN_SETJMP_SETUP:
6164 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6165 and the receiver label. */
6166 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6167 {
6168 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6169 VOIDmode, EXPAND_NORMAL);
6170 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6171 rtx label_r = label_rtx (label);
6172
6173 /* This is copied from the handling of non-local gotos. */
6174 expand_builtin_setjmp_setup (buf_addr, label_r);
6175 nonlocal_goto_handler_labels
6176 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6177 nonlocal_goto_handler_labels);
6178 /* ??? Do not let expand_label treat us as such since we would
6179 not want to be both on the list of non-local labels and on
6180 the list of forced labels. */
6181 FORCED_LABEL (label) = 0;
6182 return const0_rtx;
6183 }
6184 break;
6185
6186 case BUILT_IN_SETJMP_RECEIVER:
6187 /* __builtin_setjmp_receiver is passed the receiver label. */
6188 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6189 {
6190 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6191 rtx label_r = label_rtx (label);
6192
6193 expand_builtin_setjmp_receiver (label_r);
6194 return const0_rtx;
6195 }
6196 break;
6197
6198 /* __builtin_longjmp is passed a pointer to an array of five words.
6199 It's similar to the C library longjmp function but works with
6200 __builtin_setjmp above. */
6201 case BUILT_IN_LONGJMP:
6202 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6203 {
6204 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6205 VOIDmode, EXPAND_NORMAL);
6206 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6207
6208 if (value != const1_rtx)
6209 {
6210 error ("%<__builtin_longjmp%> second argument must be 1");
6211 return const0_rtx;
6212 }
6213
6214 expand_builtin_longjmp (buf_addr, value);
6215 return const0_rtx;
6216 }
6217 break;
6218
6219 case BUILT_IN_NONLOCAL_GOTO:
6220 target = expand_builtin_nonlocal_goto (exp);
6221 if (target)
6222 return target;
6223 break;
6224
6225 /* This updates the setjmp buffer that is its argument with the value
6226 of the current stack pointer. */
6227 case BUILT_IN_UPDATE_SETJMP_BUF:
6228 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6229 {
6230 rtx buf_addr
6231 = expand_normal (CALL_EXPR_ARG (exp, 0));
6232
6233 expand_builtin_update_setjmp_buf (buf_addr);
6234 return const0_rtx;
6235 }
6236 break;
6237
6238 case BUILT_IN_TRAP:
6239 expand_builtin_trap ();
6240 return const0_rtx;
6241
6242 case BUILT_IN_UNREACHABLE:
6243 expand_builtin_unreachable ();
6244 return const0_rtx;
6245
6246 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6247 case BUILT_IN_SIGNBITD32:
6248 case BUILT_IN_SIGNBITD64:
6249 case BUILT_IN_SIGNBITD128:
6250 target = expand_builtin_signbit (exp, target);
6251 if (target)
6252 return target;
6253 break;
6254
6255 /* Various hooks for the DWARF 2 __throw routine. */
6256 case BUILT_IN_UNWIND_INIT:
6257 expand_builtin_unwind_init ();
6258 return const0_rtx;
6259 case BUILT_IN_DWARF_CFA:
6260 return virtual_cfa_rtx;
6261 #ifdef DWARF2_UNWIND_INFO
6262 case BUILT_IN_DWARF_SP_COLUMN:
6263 return expand_builtin_dwarf_sp_column ();
6264 case BUILT_IN_INIT_DWARF_REG_SIZES:
6265 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6266 return const0_rtx;
6267 #endif
6268 case BUILT_IN_FROB_RETURN_ADDR:
6269 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6270 case BUILT_IN_EXTRACT_RETURN_ADDR:
6271 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6272 case BUILT_IN_EH_RETURN:
6273 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6274 CALL_EXPR_ARG (exp, 1));
6275 return const0_rtx;
6276 #ifdef EH_RETURN_DATA_REGNO
6277 case BUILT_IN_EH_RETURN_DATA_REGNO:
6278 return expand_builtin_eh_return_data_regno (exp);
6279 #endif
6280 case BUILT_IN_EXTEND_POINTER:
6281 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6282 case BUILT_IN_EH_POINTER:
6283 return expand_builtin_eh_pointer (exp);
6284 case BUILT_IN_EH_FILTER:
6285 return expand_builtin_eh_filter (exp);
6286 case BUILT_IN_EH_COPY_VALUES:
6287 return expand_builtin_eh_copy_values (exp);
6288
6289 case BUILT_IN_VA_START:
6290 return expand_builtin_va_start (exp);
6291 case BUILT_IN_VA_END:
6292 return expand_builtin_va_end (exp);
6293 case BUILT_IN_VA_COPY:
6294 return expand_builtin_va_copy (exp);
6295 case BUILT_IN_EXPECT:
6296 return expand_builtin_expect (exp, target);
6297 case BUILT_IN_ASSUME_ALIGNED:
6298 return expand_builtin_assume_aligned (exp, target);
6299 case BUILT_IN_PREFETCH:
6300 expand_builtin_prefetch (exp);
6301 return const0_rtx;
6302
6303 case BUILT_IN_INIT_TRAMPOLINE:
6304 return expand_builtin_init_trampoline (exp, true);
6305 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6306 return expand_builtin_init_trampoline (exp, false);
6307 case BUILT_IN_ADJUST_TRAMPOLINE:
6308 return expand_builtin_adjust_trampoline (exp);
6309
6310 case BUILT_IN_FORK:
6311 case BUILT_IN_EXECL:
6312 case BUILT_IN_EXECV:
6313 case BUILT_IN_EXECLP:
6314 case BUILT_IN_EXECLE:
6315 case BUILT_IN_EXECVP:
6316 case BUILT_IN_EXECVE:
6317 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6318 if (target)
6319 return target;
6320 break;
6321
6322 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6323 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6324 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6325 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6326 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6327 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6328 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6329 if (target)
6330 return target;
6331 break;
6332
6333 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6334 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6335 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6336 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6337 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6338 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6339 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6340 if (target)
6341 return target;
6342 break;
6343
6344 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6345 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6346 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6347 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6348 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6349 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6350 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6351 if (target)
6352 return target;
6353 break;
6354
6355 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6356 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6357 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6358 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6359 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6360 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6361 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6362 if (target)
6363 return target;
6364 break;
6365
6366 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6367 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6368 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6369 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6370 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6371 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6372 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6373 if (target)
6374 return target;
6375 break;
6376
6377 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6378 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6379 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6380 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6381 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6383 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6389 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6390 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6391 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6392 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6394 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6395 if (target)
6396 return target;
6397 break;
6398
6399 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6400 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6401 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6402 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6403 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6405 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6406 if (target)
6407 return target;
6408 break;
6409
6410 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6411 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6412 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6413 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6414 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6416 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6417 if (target)
6418 return target;
6419 break;
6420
6421 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6422 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6423 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6424 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6425 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6427 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6428 if (target)
6429 return target;
6430 break;
6431
6432 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6433 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6434 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6435 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6436 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6438 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6439 if (target)
6440 return target;
6441 break;
6442
6443 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6444 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6445 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6446 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6447 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6449 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6455 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6456 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6457 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6458 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6459 if (mode == VOIDmode)
6460 mode = TYPE_MODE (boolean_type_node);
6461 if (!target || !register_operand (target, mode))
6462 target = gen_reg_rtx (mode);
6463
6464 mode = get_builtin_sync_mode
6465 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6466 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6472 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6473 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6474 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6475 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6476 mode = get_builtin_sync_mode
6477 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6478 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6484 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6485 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6486 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6487 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6489 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6495 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6496 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6497 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6498 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6500 expand_builtin_sync_lock_release (mode, exp);
6501 return const0_rtx;
6502
6503 case BUILT_IN_SYNC_SYNCHRONIZE:
6504 expand_builtin_sync_synchronize ();
6505 return const0_rtx;
6506
6507 case BUILT_IN_ATOMIC_EXCHANGE_1:
6508 case BUILT_IN_ATOMIC_EXCHANGE_2:
6509 case BUILT_IN_ATOMIC_EXCHANGE_4:
6510 case BUILT_IN_ATOMIC_EXCHANGE_8:
6511 case BUILT_IN_ATOMIC_EXCHANGE_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6513 target = expand_builtin_atomic_exchange (mode, exp, target);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6519 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6520 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6521 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6522 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6523 {
6524 unsigned int nargs, z;
6525 vec<tree, va_gc> *vec;
6526
6527 mode =
6528 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6529 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6530 if (target)
6531 return target;
6532
6533 /* If this is turned into an external library call, the weak parameter
6534 must be dropped to match the expected parameter list. */
6535 nargs = call_expr_nargs (exp);
6536 vec_alloc (vec, nargs - 1);
6537 for (z = 0; z < 3; z++)
6538 vec->quick_push (CALL_EXPR_ARG (exp, z));
6539 /* Skip the boolean weak parameter. */
6540 for (z = 4; z < 6; z++)
6541 vec->quick_push (CALL_EXPR_ARG (exp, z));
6542 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6543 break;
6544 }
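      /* The rewritten call matches the five-argument library entry point,
	 which takes no weak flag; a sketch of its shape:

	   bool __atomic_compare_exchange_4 (void *ptr, void *expected,
					     unsigned int desired,
					     int success, int failure);
      */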
6545
6546 case BUILT_IN_ATOMIC_LOAD_1:
6547 case BUILT_IN_ATOMIC_LOAD_2:
6548 case BUILT_IN_ATOMIC_LOAD_4:
6549 case BUILT_IN_ATOMIC_LOAD_8:
6550 case BUILT_IN_ATOMIC_LOAD_16:
6551 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6552 target = expand_builtin_atomic_load (mode, exp, target);
6553 if (target)
6554 return target;
6555 break;
6556
6557 case BUILT_IN_ATOMIC_STORE_1:
6558 case BUILT_IN_ATOMIC_STORE_2:
6559 case BUILT_IN_ATOMIC_STORE_4:
6560 case BUILT_IN_ATOMIC_STORE_8:
6561 case BUILT_IN_ATOMIC_STORE_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6563 target = expand_builtin_atomic_store (mode, exp);
6564 if (target)
6565 return const0_rtx;
6566 break;
6567
6568 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6569 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6570 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6571 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6572 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6573 {
6574 enum built_in_function lib;
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6576 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6577 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6578 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6579 ignore, lib);
6580 if (target)
6581 return target;
6582 break;
6583 }
6584 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6585 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6586 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6587 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6588 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6589 {
6590 enum built_in_function lib;
6591 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6592 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6593 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6594 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6595 ignore, lib);
6596 if (target)
6597 return target;
6598 break;
6599 }
6600 case BUILT_IN_ATOMIC_AND_FETCH_1:
6601 case BUILT_IN_ATOMIC_AND_FETCH_2:
6602 case BUILT_IN_ATOMIC_AND_FETCH_4:
6603 case BUILT_IN_ATOMIC_AND_FETCH_8:
6604 case BUILT_IN_ATOMIC_AND_FETCH_16:
6605 {
6606 enum built_in_function lib;
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6608 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6609 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6610 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6611 ignore, lib);
6612 if (target)
6613 return target;
6614 break;
6615 }
6616 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6617 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6618 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6619 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6620 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6621 {
6622 enum built_in_function lib;
6623 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6624 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6625 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6626 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6627 ignore, lib);
6628 if (target)
6629 return target;
6630 break;
6631 }
6632 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6633 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6634 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6635 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6636 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6637 {
6638 enum built_in_function lib;
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6640 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6641 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6642 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6643 ignore, lib);
6644 if (target)
6645 return target;
6646 break;
6647 }
6648 case BUILT_IN_ATOMIC_OR_FETCH_1:
6649 case BUILT_IN_ATOMIC_OR_FETCH_2:
6650 case BUILT_IN_ATOMIC_OR_FETCH_4:
6651 case BUILT_IN_ATOMIC_OR_FETCH_8:
6652 case BUILT_IN_ATOMIC_OR_FETCH_16:
6653 {
6654 enum built_in_function lib;
6655 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6656 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6657 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6658 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6659 ignore, lib);
6660 if (target)
6661 return target;
6662 break;
6663 }
6664 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6665 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6666 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6667 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6668 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6669 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6671 ignore, BUILT_IN_NONE);
6672 if (target)
6673 return target;
6674 break;
6675
6676 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6677 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6678 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6679 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6680 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6682 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6683 ignore, BUILT_IN_NONE);
6684 if (target)
6685 return target;
6686 break;
6687
6688 case BUILT_IN_ATOMIC_FETCH_AND_1:
6689 case BUILT_IN_ATOMIC_FETCH_AND_2:
6690 case BUILT_IN_ATOMIC_FETCH_AND_4:
6691 case BUILT_IN_ATOMIC_FETCH_AND_8:
6692 case BUILT_IN_ATOMIC_FETCH_AND_16:
6693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6695 ignore, BUILT_IN_NONE);
6696 if (target)
6697 return target;
6698 break;
6699
6700 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6701 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6702 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6703 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6704 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6713 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6714 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6715 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6716 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_OR_1:
6725 case BUILT_IN_ATOMIC_FETCH_OR_2:
6726 case BUILT_IN_ATOMIC_FETCH_OR_4:
6727 case BUILT_IN_ATOMIC_FETCH_OR_8:
6728 case BUILT_IN_ATOMIC_FETCH_OR_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_TEST_AND_SET:
6737 return expand_builtin_atomic_test_and_set (exp, target);
6738
6739 case BUILT_IN_ATOMIC_CLEAR:
6740 return expand_builtin_atomic_clear (exp);
6741
6742 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6743 return expand_builtin_atomic_always_lock_free (exp);
6744
6745 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6746 target = expand_builtin_atomic_is_lock_free (exp);
6747 if (target)
6748 return target;
6749 break;
6750
6751 case BUILT_IN_ATOMIC_THREAD_FENCE:
6752 expand_builtin_atomic_thread_fence (exp);
6753 return const0_rtx;
6754
6755 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6756 expand_builtin_atomic_signal_fence (exp);
6757 return const0_rtx;
6758
6759 case BUILT_IN_OBJECT_SIZE:
6760 return expand_builtin_object_size (exp);
6761
6762 case BUILT_IN_MEMCPY_CHK:
6763 case BUILT_IN_MEMPCPY_CHK:
6764 case BUILT_IN_MEMMOVE_CHK:
6765 case BUILT_IN_MEMSET_CHK:
6766 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6767 if (target)
6768 return target;
6769 break;
6770
6771 case BUILT_IN_STRCPY_CHK:
6772 case BUILT_IN_STPCPY_CHK:
6773 case BUILT_IN_STRNCPY_CHK:
6774 case BUILT_IN_STPNCPY_CHK:
6775 case BUILT_IN_STRCAT_CHK:
6776 case BUILT_IN_STRNCAT_CHK:
6777 case BUILT_IN_SNPRINTF_CHK:
6778 case BUILT_IN_VSNPRINTF_CHK:
6779 maybe_emit_chk_warning (exp, fcode);
6780 break;
6781
6782 case BUILT_IN_SPRINTF_CHK:
6783 case BUILT_IN_VSPRINTF_CHK:
6784 maybe_emit_sprintf_chk_warning (exp, fcode);
6785 break;
6786
6787 case BUILT_IN_FREE:
6788 if (warn_free_nonheap_object)
6789 maybe_emit_free_warning (exp);
6790 break;
6791
6792 case BUILT_IN_THREAD_POINTER:
6793 return expand_builtin_thread_pointer (exp, target);
6794
6795 case BUILT_IN_SET_THREAD_POINTER:
6796 expand_builtin_set_thread_pointer (exp);
6797 return const0_rtx;
6798
6799 case BUILT_IN_CILK_DETACH:
6800 expand_builtin_cilk_detach (exp);
6801 return const0_rtx;
6802
6803 case BUILT_IN_CILK_POP_FRAME:
6804 expand_builtin_cilk_pop_frame (exp);
6805 return const0_rtx;
6806
6807 default: /* Just do a library call, if unknown builtin. */
6808 break;
6809 }
6810
6811 /* The switch statement above can drop through to cause the function
6812 to be called normally. */
6813 return expand_call (exp, target, ignore);
6814 }
6815
6816 /* Determine whether a tree node represents a call to a built-in
6817 function. If the tree T is a call to a built-in function with
6818 the right number of arguments of the appropriate types, return
6819 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6820 Otherwise the return value is END_BUILTINS. */
6821
6822 enum built_in_function
6823 builtin_mathfn_code (const_tree t)
6824 {
6825 const_tree fndecl, arg, parmlist;
6826 const_tree argtype, parmtype;
6827 const_call_expr_arg_iterator iter;
6828
6829 if (TREE_CODE (t) != CALL_EXPR
6830 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6831 return END_BUILTINS;
6832
6833 fndecl = get_callee_fndecl (t);
6834 if (fndecl == NULL_TREE
6835 || TREE_CODE (fndecl) != FUNCTION_DECL
6836 || ! DECL_BUILT_IN (fndecl)
6837 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6838 return END_BUILTINS;
6839
6840 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6841 init_const_call_expr_arg_iterator (t, &iter);
6842 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6843 {
6844 /* If a function doesn't take a variable number of arguments,
6845 the last element in the list will have type `void'. */
6846 parmtype = TREE_VALUE (parmlist);
6847 if (VOID_TYPE_P (parmtype))
6848 {
6849 if (more_const_call_expr_args_p (&iter))
6850 return END_BUILTINS;
6851 return DECL_FUNCTION_CODE (fndecl);
6852 }
6853
6854 if (! more_const_call_expr_args_p (&iter))
6855 return END_BUILTINS;
6856
6857 arg = next_const_call_expr_arg (&iter);
6858 argtype = TREE_TYPE (arg);
6859
6860 if (SCALAR_FLOAT_TYPE_P (parmtype))
6861 {
6862 if (! SCALAR_FLOAT_TYPE_P (argtype))
6863 return END_BUILTINS;
6864 }
6865 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6866 {
6867 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6868 return END_BUILTINS;
6869 }
6870 else if (POINTER_TYPE_P (parmtype))
6871 {
6872 if (! POINTER_TYPE_P (argtype))
6873 return END_BUILTINS;
6874 }
6875 else if (INTEGRAL_TYPE_P (parmtype))
6876 {
6877 if (! INTEGRAL_TYPE_P (argtype))
6878 return END_BUILTINS;
6879 }
6880 else
6881 return END_BUILTINS;
6882 }
6883
6884 /* Variable-length argument list. */
6885 return DECL_FUNCTION_CODE (fndecl);
6886 }
6887
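/* For example, a call tree for sqrt (x) with a floating-point argument
 yields BUILT_IN_SQRT here, while a call whose argument types do not
 match the builtin's prototype yields END_BUILTINS. */
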
6888 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6889 evaluate to a constant. */
6890
6891 static tree
6892 fold_builtin_constant_p (tree arg)
6893 {
6894 /* We return 1 for a numeric type that's known to be a constant
6895 value at compile-time or for an aggregate type that's a
6896 literal constant. */
6897 STRIP_NOPS (arg);
6898
6899 /* If we know this is a constant, return the constant one. */
6900 if (CONSTANT_CLASS_P (arg)
6901 || (TREE_CODE (arg) == CONSTRUCTOR
6902 && TREE_CONSTANT (arg)))
6903 return integer_one_node;
6904 if (TREE_CODE (arg) == ADDR_EXPR)
6905 {
6906 tree op = TREE_OPERAND (arg, 0);
6907 if (TREE_CODE (op) == STRING_CST
6908 || (TREE_CODE (op) == ARRAY_REF
6909 && integer_zerop (TREE_OPERAND (op, 1))
6910 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6911 return integer_one_node;
6912 }
6913
6914 /* If this expression has side effects, show we don't know it to be a
6915 constant. Likewise if it's a pointer or aggregate type since in
6916 those cases we only want literals, as those are only optimized
6917 when generating RTL, not later.
6918 And finally, if we are compiling an initializer, not code, we
6919 need to return a definite result now; there's not going to be any
6920 more optimization done. */
6921 if (TREE_SIDE_EFFECTS (arg)
6922 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6923 || POINTER_TYPE_P (TREE_TYPE (arg))
6924 || cfun == 0
6925 || folding_initializer
6926 || force_folding_builtin_constant_p)
6927 return integer_zero_node;
6928
6929 return NULL_TREE;
6930 }
6931
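/* As a sketch of the effect: __builtin_constant_p (42) and
 __builtin_constant_p ("abc") fold to 1 above, a call on an argument
 with side effects folds to 0, and anything still undecided is left
 alone by returning NULL_TREE. */
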
6932 /* Create a call to builtin_expect with PRED and EXPECTED (and, if
6933 non-NULL, PREDICTOR) as its arguments and return it as a truthvalue. */
6934
6935 static tree
6936 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6937 tree predictor)
6938 {
6939 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6940
6941 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6942 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6943 ret_type = TREE_TYPE (TREE_TYPE (fn));
6944 pred_type = TREE_VALUE (arg_types);
6945 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6946
6947 pred = fold_convert_loc (loc, pred_type, pred);
6948 expected = fold_convert_loc (loc, expected_type, expected);
6949 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6950 predictor);
6951
6952 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6953 build_int_cst (ret_type, 0));
6954 }
6955
6956 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
6957 Return NULL_TREE if no simplification is possible. */
6958
6959 tree
6960 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6961 {
6962 tree inner, fndecl, inner_arg0;
6963 enum tree_code code;
6964
6965 /* Distribute the expected value over short-circuiting operators.
6966 See through the cast from truthvalue_type_node to long. */
6967 inner_arg0 = arg0;
6968 while (TREE_CODE (inner_arg0) == NOP_EXPR
6969 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6970 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6971 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6972
6973 /* If this is a builtin_expect within a builtin_expect, keep the
6974 inner one. See through a comparison against a constant. It
6975 might have been added to create a truthvalue. */
6976 inner = inner_arg0;
6977
6978 if (COMPARISON_CLASS_P (inner)
6979 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6980 inner = TREE_OPERAND (inner, 0);
6981
6982 if (TREE_CODE (inner) == CALL_EXPR
6983 && (fndecl = get_callee_fndecl (inner))
6984 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6985 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6986 return arg0;
6987
6988 inner = inner_arg0;
6989 code = TREE_CODE (inner);
6990 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6991 {
6992 tree op0 = TREE_OPERAND (inner, 0);
6993 tree op1 = TREE_OPERAND (inner, 1);
6994
6995 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
6996 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
6997 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6998
6999 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7000 }
7001
7002 /* If the argument isn't invariant then there's nothing else we can do. */
7003 if (!TREE_CONSTANT (inner_arg0))
7004 return NULL_TREE;
7005
7006 /* If we expect that a comparison against the argument will fold to
7007 a constant, return the constant. In practice, this means a true
7008 constant or the address of a non-weak symbol. */
7009 inner = inner_arg0;
7010 STRIP_NOPS (inner);
7011 if (TREE_CODE (inner) == ADDR_EXPR)
7012 {
7013 do
7014 {
7015 inner = TREE_OPERAND (inner, 0);
7016 }
7017 while (TREE_CODE (inner) == COMPONENT_REF
7018 || TREE_CODE (inner) == ARRAY_REF);
7019 if ((TREE_CODE (inner) == VAR_DECL
7020 || TREE_CODE (inner) == FUNCTION_DECL)
7021 && DECL_WEAK (inner))
7022 return NULL_TREE;
7023 }
7024
7025 /* Otherwise, ARG0 already has the proper type for the return value. */
7026 return arg0;
7027 }
7028
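/* For instance, __builtin_expect (a && b, 1) is conceptually rewritten
 above as __builtin_expect (a, 1) && __builtin_expect (b, 1), so the
 expectation is distributed across the short-circuit operator. */
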
7029 /* Fold a call to __builtin_classify_type with argument ARG. */
7030
7031 static tree
7032 fold_builtin_classify_type (tree arg)
7033 {
7034 if (arg == 0)
7035 return build_int_cst (integer_type_node, no_type_class);
7036
7037 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7038 }
7039
7040 /* Fold a call to __builtin_strlen with argument ARG. */
7041
7042 static tree
7043 fold_builtin_strlen (location_t loc, tree type, tree arg)
7044 {
7045 if (!validate_arg (arg, POINTER_TYPE))
7046 return NULL_TREE;
7047 else
7048 {
7049 tree len = c_strlen (arg, 0);
7050
7051 if (len)
7052 return fold_convert_loc (loc, type, len);
7053
7054 return NULL_TREE;
7055 }
7056 }
7057
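/* E.g. strlen ("abc") folds via c_strlen to a constant 3 of the
 return type. */
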
7058 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7059
7060 static tree
7061 fold_builtin_inf (location_t loc, tree type, int warn)
7062 {
7063 REAL_VALUE_TYPE real;
7064
7065 /* __builtin_inff is intended to be usable to define INFINITY on all
7066 targets. If an infinity is not available, INFINITY expands "to a
7067 positive constant of type float that overflows at translation
7068 time", footnote "In this case, using INFINITY will violate the
7069 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7070 Thus we pedwarn to ensure this constraint violation is
7071 diagnosed. */
7072 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7073 pedwarn (loc, 0, "target format does not support infinity");
7074
7075 real_inf (&real);
7076 return build_real (type, real);
7077 }
7078
7079 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7080
7081 static tree
7082 fold_builtin_nan (tree arg, tree type, int quiet)
7083 {
7084 REAL_VALUE_TYPE real;
7085 const char *str;
7086
7087 if (!validate_arg (arg, POINTER_TYPE))
7088 return NULL_TREE;
7089 str = c_getstr (arg);
7090 if (!str)
7091 return NULL_TREE;
7092
7093 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7094 return NULL_TREE;
7095
7096 return build_real (type, real);
7097 }
7098
7099 /* Return true if the floating point expression T has an integer value.
7100 We also allow +Inf, -Inf and NaN to be considered integer values. */
7101
7102 static bool
7103 integer_valued_real_p (tree t)
7104 {
7105 switch (TREE_CODE (t))
7106 {
7107 case FLOAT_EXPR:
7108 return true;
7109
7110 case ABS_EXPR:
7111 case SAVE_EXPR:
7112 return integer_valued_real_p (TREE_OPERAND (t, 0));
7113
7114 case COMPOUND_EXPR:
7115 case MODIFY_EXPR:
7116 case BIND_EXPR:
7117 return integer_valued_real_p (TREE_OPERAND (t, 1));
7118
7119 case PLUS_EXPR:
7120 case MINUS_EXPR:
7121 case MULT_EXPR:
7122 case MIN_EXPR:
7123 case MAX_EXPR:
7124 return integer_valued_real_p (TREE_OPERAND (t, 0))
7125 && integer_valued_real_p (TREE_OPERAND (t, 1));
7126
7127 case COND_EXPR:
7128 return integer_valued_real_p (TREE_OPERAND (t, 1))
7129 && integer_valued_real_p (TREE_OPERAND (t, 2));
7130
7131 case REAL_CST:
7132 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7133
7134 case NOP_EXPR:
7135 {
7136 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7137 if (TREE_CODE (type) == INTEGER_TYPE)
7138 return true;
7139 if (TREE_CODE (type) == REAL_TYPE)
7140 return integer_valued_real_p (TREE_OPERAND (t, 0));
7141 break;
7142 }
7143
7144 case CALL_EXPR:
7145 switch (builtin_mathfn_code (t))
7146 {
7147 CASE_FLT_FN (BUILT_IN_CEIL):
7148 CASE_FLT_FN (BUILT_IN_FLOOR):
7149 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7150 CASE_FLT_FN (BUILT_IN_RINT):
7151 CASE_FLT_FN (BUILT_IN_ROUND):
7152 CASE_FLT_FN (BUILT_IN_TRUNC):
7153 return true;
7154
7155 CASE_FLT_FN (BUILT_IN_FMIN):
7156 CASE_FLT_FN (BUILT_IN_FMAX):
7157 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7158 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7159
7160 default:
7161 break;
7162 }
7163 break;
7164
7165 default:
7166 break;
7167 }
7168 return false;
7169 }
7170
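/* E.g. floor (x), (double) i for integral i, and trunc (x) + 1.0 are
 all recognized as integer valued by the predicate above. */
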
7171 /* FNDECL is assumed to be a builtin where truncation can be propagated
7172 across (for instance floor((double)f) == (double)floorf (f)).
7173 Do the transformation for a call with argument ARG. */
7174
7175 static tree
7176 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7177 {
7178 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7179
7180 if (!validate_arg (arg, REAL_TYPE))
7181 return NULL_TREE;
7182
7183 /* Integer rounding functions are idempotent. */
7184 if (fcode == builtin_mathfn_code (arg))
7185 return arg;
7186
7187 /* If argument is already integer valued, and we don't need to worry
7188 about setting errno, there's no need to perform rounding. */
7189 if (! flag_errno_math && integer_valued_real_p (arg))
7190 return arg;
7191
7192 if (optimize)
7193 {
7194 tree arg0 = strip_float_extensions (arg);
7195 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7196 tree newtype = TREE_TYPE (arg0);
7197 tree decl;
7198
7199 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7200 && (decl = mathfn_built_in (newtype, fcode)))
7201 return fold_convert_loc (loc, ftype,
7202 build_call_expr_loc (loc, decl, 1,
7203 fold_convert_loc (loc,
7204 newtype,
7205 arg0)));
7206 }
7207 return NULL_TREE;
7208 }
7209
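/* E.g. floor (floor (x)) folds to floor (x) by idempotence, and when
 optimizing floor ((double) f) for a float f becomes (double) floorf (f)
 if the narrower builtin is available. */
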
7210 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7211 the argument, for instance lround((double)f) -> lroundf (f).
7212 Do the transformation for a call with argument ARG. */
7213
7214 static tree
7215 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7216 {
7217 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7218
7219 if (!validate_arg (arg, REAL_TYPE))
7220 return NULL_TREE;
7221
7222 /* If argument is already integer valued, and we don't need to worry
7223 about setting errno, there's no need to perform rounding. */
7224 if (! flag_errno_math && integer_valued_real_p (arg))
7225 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7226 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7227
7228 if (optimize)
7229 {
7230 tree ftype = TREE_TYPE (arg);
7231 tree arg0 = strip_float_extensions (arg);
7232 tree newtype = TREE_TYPE (arg0);
7233 tree decl;
7234
7235 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7236 && (decl = mathfn_built_in (newtype, fcode)))
7237 return build_call_expr_loc (loc, decl, 1,
7238 fold_convert_loc (loc, newtype, arg0));
7239 }
7240
7241 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7242 sizeof (int) == sizeof (long). */
7243 if (TYPE_PRECISION (integer_type_node)
7244 == TYPE_PRECISION (long_integer_type_node))
7245 {
7246 tree newfn = NULL_TREE;
7247 switch (fcode)
7248 {
7249 CASE_FLT_FN (BUILT_IN_ICEIL):
7250 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7251 break;
7252
7253 CASE_FLT_FN (BUILT_IN_IFLOOR):
7254 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7255 break;
7256
7257 CASE_FLT_FN (BUILT_IN_IROUND):
7258 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7259 break;
7260
7261 CASE_FLT_FN (BUILT_IN_IRINT):
7262 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7263 break;
7264
7265 default:
7266 break;
7267 }
7268
7269 if (newfn)
7270 {
7271 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7272 return fold_convert_loc (loc,
7273 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7274 }
7275 }
7276
7277 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7278 sizeof (long long) == sizeof (long). */
7279 if (TYPE_PRECISION (long_long_integer_type_node)
7280 == TYPE_PRECISION (long_integer_type_node))
7281 {
7282 tree newfn = NULL_TREE;
7283 switch (fcode)
7284 {
7285 CASE_FLT_FN (BUILT_IN_LLCEIL):
7286 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7287 break;
7288
7289 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7290 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7291 break;
7292
7293 CASE_FLT_FN (BUILT_IN_LLROUND):
7294 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7295 break;
7296
7297 CASE_FLT_FN (BUILT_IN_LLRINT):
7298 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7299 break;
7300
7301 default:
7302 break;
7303 }
7304
7305 if (newfn)
7306 {
7307 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7308 return fold_convert_loc (loc,
7309 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7310 }
7311 }
7312
7313 return NULL_TREE;
7314 }
7315
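/* E.g. when optimizing, lround ((double) f) for a float f becomes
 lroundf (f), and llround (x) canonicalizes to lround (x) when long
 and long long have the same precision. */
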
7316 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7317 return type. Return NULL_TREE if no simplification can be made. */
7318
7319 static tree
7320 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7321 {
7322 tree res;
7323
7324 if (!validate_arg (arg, COMPLEX_TYPE)
7325 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7326 return NULL_TREE;
7327
7328 /* Calculate the result when the argument is a constant. */
7329 if (TREE_CODE (arg) == COMPLEX_CST
7330 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7331 type, mpfr_hypot)))
7332 return res;
7333
7334 if (TREE_CODE (arg) == COMPLEX_EXPR)
7335 {
7336 tree real = TREE_OPERAND (arg, 0);
7337 tree imag = TREE_OPERAND (arg, 1);
7338
7339 /* If either part is zero, cabs is fabs of the other. */
7340 if (real_zerop (real))
7341 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7342 if (real_zerop (imag))
7343 return fold_build1_loc (loc, ABS_EXPR, type, real);
7344
7345 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7346 if (flag_unsafe_math_optimizations
7347 && operand_equal_p (real, imag, OEP_PURE_SAME))
7348 {
7349 const REAL_VALUE_TYPE sqrt2_trunc
7350 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7351 STRIP_NOPS (real);
7352 return fold_build2_loc (loc, MULT_EXPR, type,
7353 fold_build1_loc (loc, ABS_EXPR, type, real),
7354 build_real (type, sqrt2_trunc));
7355 }
7356 }
7357
7358 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7359 if (TREE_CODE (arg) == NEGATE_EXPR
7360 || TREE_CODE (arg) == CONJ_EXPR)
7361 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7362
7363 /* Don't do this when optimizing for size. */
7364 if (flag_unsafe_math_optimizations
7365 && optimize && optimize_function_for_speed_p (cfun))
7366 {
7367 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7368
7369 if (sqrtfn != NULL_TREE)
7370 {
7371 tree rpart, ipart, result;
7372
7373 arg = builtin_save_expr (arg);
7374
7375 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7376 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7377
7378 rpart = builtin_save_expr (rpart);
7379 ipart = builtin_save_expr (ipart);
7380
7381 result = fold_build2_loc (loc, PLUS_EXPR, type,
7382 fold_build2_loc (loc, MULT_EXPR, type,
7383 rpart, rpart),
7384 fold_build2_loc (loc, MULT_EXPR, type,
7385 ipart, ipart));
7386
7387 return build_call_expr_loc (loc, sqrtfn, 1, result);
7388 }
7389 }
7390
7391 return NULL_TREE;
7392 }
7393
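/* E.g. a COMPLEX_EXPR with a zero imaginary part folds to fabs of the
 real part, and with -funsafe-math-optimizations (when optimizing for
 speed) cabs (z) expands above to sqrt (r*r + i*i). */
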
7394 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7395 complex tree type of the result. If NEG is true, the imaginary
7396 zero is negative. */
7397
7398 static tree
7399 build_complex_cproj (tree type, bool neg)
7400 {
7401 REAL_VALUE_TYPE rinf, rzero = dconst0;
7402
7403 real_inf (&rinf);
7404 rzero.sign = neg;
7405 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7406 build_real (TREE_TYPE (type), rzero));
7407 }
7408
7409 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7410 return type. Return NULL_TREE if no simplification can be made. */
7411
7412 static tree
7413 fold_builtin_cproj (location_t loc, tree arg, tree type)
7414 {
7415 if (!validate_arg (arg, COMPLEX_TYPE)
7416 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7417 return NULL_TREE;
7418
7419 /* If there are no infinities, return arg. */
7420 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7421 return non_lvalue_loc (loc, arg);
7422
7423 /* Calculate the result when the argument is a constant. */
7424 if (TREE_CODE (arg) == COMPLEX_CST)
7425 {
7426 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7427 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7428
7429 if (real_isinf (real) || real_isinf (imag))
7430 return build_complex_cproj (type, imag->sign);
7431 else
7432 return arg;
7433 }
7434 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7435 {
7436 tree real = TREE_OPERAND (arg, 0);
7437 tree imag = TREE_OPERAND (arg, 1);
7438
7439 STRIP_NOPS (real);
7440 STRIP_NOPS (imag);
7441
7442 /* If the real part is inf and the imag part is known to be
7443 nonnegative, return (inf + 0i). Remember side-effects are
7444 possible in the imag part. */
7445 if (TREE_CODE (real) == REAL_CST
7446 && real_isinf (TREE_REAL_CST_PTR (real))
7447 && tree_expr_nonnegative_p (imag))
7448 return omit_one_operand_loc (loc, type,
7449 build_complex_cproj (type, false),
7450 arg);
7451
7452 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7453 Remember side-effects are possible in the real part. */
7454 if (TREE_CODE (imag) == REAL_CST
7455 && real_isinf (TREE_REAL_CST_PTR (imag)))
7456 return
7457 omit_one_operand_loc (loc, type,
7458 build_complex_cproj (type, TREE_REAL_CST_PTR
7459 (imag)->sign), arg);
7460 }
7461
7462 return NULL_TREE;
7463 }
7464
7465 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7466 Return NULL_TREE if no simplification can be made. */
7467
7468 static tree
7469 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7470 {
7471
7472 enum built_in_function fcode;
7473 tree res;
7474
7475 if (!validate_arg (arg, REAL_TYPE))
7476 return NULL_TREE;
7477
7478 /* Calculate the result when the argument is a constant. */
7479 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7480 return res;
7481
7482 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7483 fcode = builtin_mathfn_code (arg);
7484 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7485 {
7486 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7487 arg = fold_build2_loc (loc, MULT_EXPR, type,
7488 CALL_EXPR_ARG (arg, 0),
7489 build_real (type, dconsthalf));
7490 return build_call_expr_loc (loc, expfn, 1, arg);
7491 }
7492
7493 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7494 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7495 {
7496 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7497
7498 if (powfn)
7499 {
7500 tree arg0 = CALL_EXPR_ARG (arg, 0);
7501 tree tree_root;
7502 /* The inner root was either sqrt or cbrt. */
7503 /* This was a conditional expression but it triggered a bug
7504 in Sun C 5.5. */
7505 REAL_VALUE_TYPE dconstroot;
7506 if (BUILTIN_SQRT_P (fcode))
7507 dconstroot = dconsthalf;
7508 else
7509 dconstroot = dconst_third ();
7510
7511 /* Adjust for the outer root. */
7512 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7513 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7514 tree_root = build_real (type, dconstroot);
7515 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7516 }
7517 }
7518
7519 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7520 if (flag_unsafe_math_optimizations
7521 && (fcode == BUILT_IN_POW
7522 || fcode == BUILT_IN_POWF
7523 || fcode == BUILT_IN_POWL))
7524 {
7525 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7526 tree arg0 = CALL_EXPR_ARG (arg, 0);
7527 tree arg1 = CALL_EXPR_ARG (arg, 1);
7528 tree narg1;
7529 if (!tree_expr_nonnegative_p (arg0))
7530 arg0 = build1 (ABS_EXPR, type, arg0);
7531 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7532 build_real (type, dconsthalf));
7533 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7534 }
7535
7536 return NULL_TREE;
7537 }
7538
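/* E.g. with -funsafe-math-optimizations, sqrt (exp (x)) becomes
 exp (x * 0.5) and sqrt (pow (x, y)) becomes pow (fabs (x), y * 0.5)
 via the transformations above. */
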
7539 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7540 Return NULL_TREE if no simplification can be made. */
7541
7542 static tree
7543 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7544 {
7545 const enum built_in_function fcode = builtin_mathfn_code (arg);
7546 tree res;
7547
7548 if (!validate_arg (arg, REAL_TYPE))
7549 return NULL_TREE;
7550
7551 /* Calculate the result when the argument is a constant. */
7552 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7553 return res;
7554
7555 if (flag_unsafe_math_optimizations)
7556 {
7557 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7558 if (BUILTIN_EXPONENT_P (fcode))
7559 {
7560 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7561 const REAL_VALUE_TYPE third_trunc =
7562 real_value_truncate (TYPE_MODE (type), dconst_third ());
7563 arg = fold_build2_loc (loc, MULT_EXPR, type,
7564 CALL_EXPR_ARG (arg, 0),
7565 build_real (type, third_trunc));
7566 return build_call_expr_loc (loc, expfn, 1, arg);
7567 }
7568
7569 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7570 if (BUILTIN_SQRT_P (fcode))
7571 {
7572 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7573
7574 if (powfn)
7575 {
7576 tree arg0 = CALL_EXPR_ARG (arg, 0);
7577 tree tree_root;
7578 REAL_VALUE_TYPE dconstroot = dconst_third ();
7579
7580 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7581 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7582 tree_root = build_real (type, dconstroot);
7583 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7584 }
7585 }
7586
7587 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7588 if (BUILTIN_CBRT_P (fcode))
7589 {
7590 tree arg0 = CALL_EXPR_ARG (arg, 0);
7591 if (tree_expr_nonnegative_p (arg0))
7592 {
7593 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7594
7595 if (powfn)
7596 {
7597 tree tree_root;
7598 REAL_VALUE_TYPE dconstroot;
7599
7600 real_arithmetic (&dconstroot, MULT_EXPR,
7601 dconst_third_ptr (), dconst_third_ptr ());
7602 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7603 tree_root = build_real (type, dconstroot);
7604 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7605 }
7606 }
7607 }
7608
7609 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7610 if (fcode == BUILT_IN_POW
7611 || fcode == BUILT_IN_POWF
7612 || fcode == BUILT_IN_POWL)
7613 {
7614 tree arg00 = CALL_EXPR_ARG (arg, 0);
7615 tree arg01 = CALL_EXPR_ARG (arg, 1);
7616 if (tree_expr_nonnegative_p (arg00))
7617 {
7618 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7619 const REAL_VALUE_TYPE dconstroot
7620 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7621 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7622 build_real (type, dconstroot));
7623 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7624 }
7625 }
7626 }
7627 return NULL_TREE;
7628 }
7629
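/* E.g. with -funsafe-math-optimizations, cbrt (sqrt (x)) becomes
 pow (x, 1/6) and cbrt (cbrt (x)) becomes pow (x, 1/9) for
 nonnegative x. */
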
7630 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7631 TYPE is the type of the return value. Return NULL_TREE if no
7632 simplification can be made. */
7633
7634 static tree
7635 fold_builtin_cos (location_t loc,
7636 tree arg, tree type, tree fndecl)
7637 {
7638 tree res, narg;
7639
7640 if (!validate_arg (arg, REAL_TYPE))
7641 return NULL_TREE;
7642
7643 /* Calculate the result when the argument is a constant. */
7644 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7645 return res;
7646
7647 /* Optimize cos(-x) into cos (x). */
7648 if ((narg = fold_strip_sign_ops (arg)))
7649 return build_call_expr_loc (loc, fndecl, 1, narg);
7650
7651 return NULL_TREE;
7652 }
7653
7654 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7655 Return NULL_TREE if no simplification can be made. */
7656
7657 static tree
7658 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7659 {
7660 if (validate_arg (arg, REAL_TYPE))
7661 {
7662 tree res, narg;
7663
7664 /* Calculate the result when the argument is a constant. */
7665 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7666 return res;
7667
7668 /* Optimize cosh(-x) into cosh (x). */
7669 if ((narg = fold_strip_sign_ops (arg)))
7670 return build_call_expr_loc (loc, fndecl, 1, narg);
7671 }
7672
7673 return NULL_TREE;
7674 }
7675
7676 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7677 argument ARG. TYPE is the type of the return value. Return
7678 NULL_TREE if no simplification can be made. */
7679
7680 static tree
7681 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7682 bool hyper)
7683 {
7684 if (validate_arg (arg, COMPLEX_TYPE)
7685 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7686 {
7687 tree tmp;
7688
7689 /* Calculate the result when the argument is a constant. */
7690 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7691 return tmp;
7692
7693 /* Optimize fn(-x) into fn(x). */
7694 if ((tmp = fold_strip_sign_ops (arg)))
7695 return build_call_expr_loc (loc, fndecl, 1, tmp);
7696 }
7697
7698 return NULL_TREE;
7699 }
7700
7701 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7702 Return NULL_TREE if no simplification can be made. */
7703
7704 static tree
7705 fold_builtin_tan (tree arg, tree type)
7706 {
7707 enum built_in_function fcode;
7708 tree res;
7709
7710 if (!validate_arg (arg, REAL_TYPE))
7711 return NULL_TREE;
7712
7713 /* Calculate the result when the argument is a constant. */
7714 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7715 return res;
7716
7717 /* Optimize tan(atan(x)) = x. */
7718 fcode = builtin_mathfn_code (arg);
7719 if (flag_unsafe_math_optimizations
7720 && (fcode == BUILT_IN_ATAN
7721 || fcode == BUILT_IN_ATANF
7722 || fcode == BUILT_IN_ATANL))
7723 return CALL_EXPR_ARG (arg, 0);
7724
7725 return NULL_TREE;
7726 }
7727
7728 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7729 NULL_TREE if no simplification can be made. */
7730
7731 static tree
7732 fold_builtin_sincos (location_t loc,
7733 tree arg0, tree arg1, tree arg2)
7734 {
7735 tree type;
7736 tree res, fn, call;
7737
7738 if (!validate_arg (arg0, REAL_TYPE)
7739 || !validate_arg (arg1, POINTER_TYPE)
7740 || !validate_arg (arg2, POINTER_TYPE))
7741 return NULL_TREE;
7742
7743 type = TREE_TYPE (arg0);
7744
7745 /* Calculate the result when the argument is a constant. */
7746 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7747 return res;
7748
7749 /* Canonicalize sincos to cexpi. */
7750 if (!targetm.libc_has_function (function_c99_math_complex))
7751 return NULL_TREE;
7752 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7753 if (!fn)
7754 return NULL_TREE;
7755
7756 call = build_call_expr_loc (loc, fn, 1, arg0);
7757 call = builtin_save_expr (call);
7758
7759 return build2 (COMPOUND_EXPR, void_type_node,
7760 build2 (MODIFY_EXPR, void_type_node,
7761 build_fold_indirect_ref_loc (loc, arg1),
7762 build1 (IMAGPART_EXPR, type, call)),
7763 build2 (MODIFY_EXPR, void_type_node,
7764 build_fold_indirect_ref_loc (loc, arg2),
7765 build1 (REALPART_EXPR, type, call)));
7766 }
7767
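/* The canonicalization above turns sincos (x, &s, &c) into roughly
 tmp = cexpi (x); s = cimag (tmp); c = creal (tmp);
 when the target's C library provides C99 complex math. */
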
7768 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7769 NULL_TREE if no simplification can be made. */
7770
7771 static tree
7772 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7773 {
7774 tree rtype;
7775 tree realp, imagp, ifn;
7776 tree res;
7777
7778 if (!validate_arg (arg0, COMPLEX_TYPE)
7779 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7780 return NULL_TREE;
7781
7782 /* Calculate the result when the argument is a constant. */
7783 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7784 return res;
7785
7786 rtype = TREE_TYPE (TREE_TYPE (arg0));
7787
7788 /* If we can determine that the real part of arg0 is constant zero,
7789 fold to cexpi. */
7790 if (!targetm.libc_has_function (function_c99_math_complex))
7791 return NULL_TREE;
7792 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7793 if (!ifn)
7794 return NULL_TREE;
7795
7796 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7797 && real_zerop (realp))
7798 {
7799 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7800 return build_call_expr_loc (loc, ifn, 1, narg);
7801 }
7802
7803 /* If we can easily decompose the real and imaginary parts, split cexp
7804 into exp (r) * cexpi (i). */
7805 if (flag_unsafe_math_optimizations
7806 && realp)
7807 {
7808 tree rfn, rcall, icall;
7809
7810 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7811 if (!rfn)
7812 return NULL_TREE;
7813
7814 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7815 if (!imagp)
7816 return NULL_TREE;
7817
7818 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7819 icall = builtin_save_expr (icall);
7820 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7821 rcall = builtin_save_expr (rcall);
7822 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7823 fold_build2_loc (loc, MULT_EXPR, rtype,
7824 rcall,
7825 fold_build1_loc (loc, REALPART_EXPR,
7826 rtype, icall)),
7827 fold_build2_loc (loc, MULT_EXPR, rtype,
7828 rcall,
7829 fold_build1_loc (loc, IMAGPART_EXPR,
7830 rtype, icall)));
7831 }
7832
7833 return NULL_TREE;
7834 }
7835
7836 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7837 Return NULL_TREE if no simplification can be made. */
7838
7839 static tree
7840 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7841 {
7842 if (!validate_arg (arg, REAL_TYPE))
7843 return NULL_TREE;
7844
7845 /* Optimize trunc of constant value. */
7846 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7847 {
7848 REAL_VALUE_TYPE r, x;
7849 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7850
7851 x = TREE_REAL_CST (arg);
7852 real_trunc (&r, TYPE_MODE (type), &x);
7853 return build_real (type, r);
7854 }
7855
7856 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7857 }
7858
7859 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7860 Return NULL_TREE if no simplification can be made. */
7861
7862 static tree
7863 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7864 {
7865 if (!validate_arg (arg, REAL_TYPE))
7866 return NULL_TREE;
7867
7868 /* Optimize floor of constant value. */
7869 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7870 {
7871 REAL_VALUE_TYPE x;
7872
7873 x = TREE_REAL_CST (arg);
7874 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7875 {
7876 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7877 REAL_VALUE_TYPE r;
7878
7879 real_floor (&r, TYPE_MODE (type), &x);
7880 return build_real (type, r);
7881 }
7882 }
7883
7884 /* Fold floor (x) where x is nonnegative to trunc (x). */
7885 if (tree_expr_nonnegative_p (arg))
7886 {
7887 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7888 if (truncfn)
7889 return build_call_expr_loc (loc, truncfn, 1, arg);
7890 }
7891
7892 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7893 }
7894
7895 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7896 Return NULL_TREE if no simplification can be made. */
7897
7898 static tree
7899 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7900 {
7901 if (!validate_arg (arg, REAL_TYPE))
7902 return NULL_TREE;
7903
7904 /* Optimize ceil of constant value. */
7905 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7906 {
7907 REAL_VALUE_TYPE x;
7908
7909 x = TREE_REAL_CST (arg);
7910 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7911 {
7912 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7913 REAL_VALUE_TYPE r;
7914
7915 real_ceil (&r, TYPE_MODE (type), &x);
7916 return build_real (type, r);
7917 }
7918 }
7919
7920 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7921 }
7922
7923 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7924 Return NULL_TREE if no simplification can be made. */
7925
7926 static tree
7927 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7928 {
7929 if (!validate_arg (arg, REAL_TYPE))
7930 return NULL_TREE;
7931
7932 /* Optimize round of constant value. */
7933 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7934 {
7935 REAL_VALUE_TYPE x;
7936
7937 x = TREE_REAL_CST (arg);
7938 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7939 {
7940 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7941 REAL_VALUE_TYPE r;
7942
7943 real_round (&r, TYPE_MODE (type), &x);
7944 return build_real (type, r);
7945 }
7946 }
7947
7948 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7949 }
7950
7951 /* Fold function call to builtin lround, lroundf or lroundl (or the
7952 corresponding long long versions) and other rounding functions. ARG
7953 is the argument to the call. Return NULL_TREE if no simplification
7954 can be made. */
7955
7956 static tree
7957 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7958 {
7959 if (!validate_arg (arg, REAL_TYPE))
7960 return NULL_TREE;
7961
7962 /* Optimize lround of constant value. */
7963 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7964 {
7965 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7966
7967 if (real_isfinite (&x))
7968 {
7969 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7970 tree ftype = TREE_TYPE (arg);
7971 REAL_VALUE_TYPE r;
7972 bool fail = false;
7973
7974 switch (DECL_FUNCTION_CODE (fndecl))
7975 {
7976 CASE_FLT_FN (BUILT_IN_IFLOOR):
7977 CASE_FLT_FN (BUILT_IN_LFLOOR):
7978 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7979 real_floor (&r, TYPE_MODE (ftype), &x);
7980 break;
7981
7982 CASE_FLT_FN (BUILT_IN_ICEIL):
7983 CASE_FLT_FN (BUILT_IN_LCEIL):
7984 CASE_FLT_FN (BUILT_IN_LLCEIL):
7985 real_ceil (&r, TYPE_MODE (ftype), &x);
7986 break;
7987
7988 CASE_FLT_FN (BUILT_IN_IROUND):
7989 CASE_FLT_FN (BUILT_IN_LROUND):
7990 CASE_FLT_FN (BUILT_IN_LLROUND):
7991 real_round (&r, TYPE_MODE (ftype), &x);
7992 break;
7993
7994 default:
7995 gcc_unreachable ();
7996 }
7997
7998 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
7999 if (!fail)
8000 return wide_int_to_tree (itype, val);
8001 }
8002 }
8003
8004 switch (DECL_FUNCTION_CODE (fndecl))
8005 {
8006 CASE_FLT_FN (BUILT_IN_LFLOOR):
8007 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8008 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8009 if (tree_expr_nonnegative_p (arg))
8010 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8011 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8012 break;
8013 default:;
8014 }
8015
8016 return fold_fixed_mathfn (loc, fndecl, arg);
8017 }
8018
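/* E.g. lround (2.5) folds to the integer constant 3 at compile time,
 and lfloor (x) for nonnegative x becomes a plain truncating
 float-to-integer conversion. */
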
8019 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8020 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8021 the argument to the call. Return NULL_TREE if no simplification can
8022 be made. */
8023
8024 static tree
8025 fold_builtin_bitop (tree fndecl, tree arg)
8026 {
8027 if (!validate_arg (arg, INTEGER_TYPE))
8028 return NULL_TREE;
8029
8030 /* Optimize for constant argument. */
8031 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8032 {
8033 tree type = TREE_TYPE (arg);
8034 int result;
8035
8036 switch (DECL_FUNCTION_CODE (fndecl))
8037 {
8038 CASE_INT_FN (BUILT_IN_FFS):
8039 result = wi::ffs (arg);
8040 break;
8041
8042 CASE_INT_FN (BUILT_IN_CLZ):
8043 if (wi::ne_p (arg, 0))
8044 result = wi::clz (arg);
8045 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8046 result = TYPE_PRECISION (type);
8047 break;
8048
8049 CASE_INT_FN (BUILT_IN_CTZ):
8050 if (wi::ne_p (arg, 0))
8051 result = wi::ctz (arg);
8052 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8053 result = TYPE_PRECISION (type);
8054 break;
8055
8056 CASE_INT_FN (BUILT_IN_CLRSB):
8057 result = wi::clrsb (arg);
8058 break;
8059
8060 CASE_INT_FN (BUILT_IN_POPCOUNT):
8061 result = wi::popcount (arg);
8062 break;
8063
8064 CASE_INT_FN (BUILT_IN_PARITY):
8065 result = wi::parity (arg);
8066 break;
8067
8068 default:
8069 gcc_unreachable ();
8070 }
8071
8072 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8073 }
8074
8075 return NULL_TREE;
8076 }
8077
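/* E.g. __builtin_popcount (0xff) folds to 8 and __builtin_ffs (8)
 folds to 4 here. */
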
8078 /* Fold function call to builtin_bswap and the short, long and long long
8079 variants. Return NULL_TREE if no simplification can be made. */
8080 static tree
8081 fold_builtin_bswap (tree fndecl, tree arg)
8082 {
8083 if (! validate_arg (arg, INTEGER_TYPE))
8084 return NULL_TREE;
8085
8086 /* Optimize constant value. */
8087 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8088 {
8089 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8090
8091 switch (DECL_FUNCTION_CODE (fndecl))
8092 {
8093 case BUILT_IN_BSWAP16:
8094 case BUILT_IN_BSWAP32:
8095 case BUILT_IN_BSWAP64:
8096 {
8097 signop sgn = TYPE_SIGN (type);
8098 tree result =
8099 wide_int_to_tree (type,
8100 wide_int::from (arg, TYPE_PRECISION (type),
8101 sgn).bswap ());
8102 return result;
8103 }
8104 default:
8105 gcc_unreachable ();
8106 }
8107 }
8108
8109 return NULL_TREE;
8110 }
8111
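/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412. */
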
8112 /* A subroutine of fold_builtin to fold the various logarithmic
8113 functions. Return NULL_TREE if no simplification can be made.
8114 FUNC is the corresponding MPFR logarithm function. */
8115
8116 static tree
8117 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8118 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8119 {
8120 if (validate_arg (arg, REAL_TYPE))
8121 {
8122 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8123 tree res;
8124 const enum built_in_function fcode = builtin_mathfn_code (arg);
8125
8126 /* Calculate the result when the argument is a constant. */
8127 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8128 return res;
8129
8130 /* Special case, optimize logN(expN(x)) = x. */
8131 if (flag_unsafe_math_optimizations
8132 && ((func == mpfr_log
8133 && (fcode == BUILT_IN_EXP
8134 || fcode == BUILT_IN_EXPF
8135 || fcode == BUILT_IN_EXPL))
8136 || (func == mpfr_log2
8137 && (fcode == BUILT_IN_EXP2
8138 || fcode == BUILT_IN_EXP2F
8139 || fcode == BUILT_IN_EXP2L))
8140 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8141 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8142
8143 /* Optimize logN(func()) for various exponential functions. We
8144 want to determine the value "x" and the power "exponent" in
8145 order to transform logN(x**exponent) into exponent*logN(x). */
8146 if (flag_unsafe_math_optimizations)
8147 {
8148 tree exponent = 0, x = 0;
8149
8150 switch (fcode)
8151 {
8152 CASE_FLT_FN (BUILT_IN_EXP):
8153 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8154 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8155 dconst_e ()));
8156 exponent = CALL_EXPR_ARG (arg, 0);
8157 break;
8158 CASE_FLT_FN (BUILT_IN_EXP2):
8159 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8160 x = build_real (type, dconst2);
8161 exponent = CALL_EXPR_ARG (arg, 0);
8162 break;
8163 CASE_FLT_FN (BUILT_IN_EXP10):
8164 CASE_FLT_FN (BUILT_IN_POW10):
8165 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8166 {
8167 REAL_VALUE_TYPE dconst10;
8168 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8169 x = build_real (type, dconst10);
8170 }
8171 exponent = CALL_EXPR_ARG (arg, 0);
8172 break;
8173 CASE_FLT_FN (BUILT_IN_SQRT):
8174 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8175 x = CALL_EXPR_ARG (arg, 0);
8176 exponent = build_real (type, dconsthalf);
8177 break;
8178 CASE_FLT_FN (BUILT_IN_CBRT):
8179 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8180 x = CALL_EXPR_ARG (arg, 0);
8181 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8182 dconst_third ()));
8183 break;
8184 CASE_FLT_FN (BUILT_IN_POW):
8185 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8186 x = CALL_EXPR_ARG (arg, 0);
8187 exponent = CALL_EXPR_ARG (arg, 1);
8188 break;
8189 default:
8190 break;
8191 }
8192
8193 /* Now perform the optimization. */
8194 if (x && exponent)
8195 {
8196 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8197 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8198 }
8199 }
8200 }
8201
8202 return NULL_TREE;
8203 }
8204
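/* So with -funsafe-math-optimizations, e.g. log (exp (x)) folds to x,
 log (sqrt (x)) becomes 0.5 * log (x), and log (pow (x, y)) becomes
 y * log (x). */
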
8205 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8206 NULL_TREE if no simplification can be made. */
8207
8208 static tree
8209 fold_builtin_hypot (location_t loc, tree fndecl,
8210 tree arg0, tree arg1, tree type)
8211 {
8212 tree res, narg0, narg1;
8213
8214 if (!validate_arg (arg0, REAL_TYPE)
8215 || !validate_arg (arg1, REAL_TYPE))
8216 return NULL_TREE;
8217
8218 /* Calculate the result when the argument is a constant. */
8219 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8220 return res;
8221
8222 /* If either argument to hypot has a negate or abs, strip that off.
8223 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8224 narg0 = fold_strip_sign_ops (arg0);
8225 narg1 = fold_strip_sign_ops (arg1);
8226 if (narg0 || narg1)
8227 {
8228 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8229 narg1 ? narg1 : arg1);
8230 }
8231
8232 /* If either argument is zero, hypot is fabs of the other. */
8233 if (real_zerop (arg0))
8234 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8235 else if (real_zerop (arg1))
8236 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8237
8238 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8239 if (flag_unsafe_math_optimizations
8240 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8241 {
8242 const REAL_VALUE_TYPE sqrt2_trunc
8243 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8244 return fold_build2_loc (loc, MULT_EXPR, type,
8245 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8246 build_real (type, sqrt2_trunc));
8247 }
8248
8249 return NULL_TREE;
8250 }
8251
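/* E.g. hypot (x, 0.0) folds to fabs (x), and with
 -funsafe-math-optimizations hypot (x, x) folds to
 fabs (x) * sqrt (2). */
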
8252
8253 /* Fold a builtin function call to pow, powf, or powl. Return
8254 NULL_TREE if no simplification can be made. */
8255 static tree
8256 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8257 {
8258 tree res;
8259
8260 if (!validate_arg (arg0, REAL_TYPE)
8261 || !validate_arg (arg1, REAL_TYPE))
8262 return NULL_TREE;
8263
8264 /* Calculate the result when the argument is a constant. */
8265 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8266 return res;
8267
8268 /* Optimize pow(1.0,y) = 1.0. */
8269 if (real_onep (arg0))
8270 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8271
8272 if (TREE_CODE (arg1) == REAL_CST
8273 && !TREE_OVERFLOW (arg1))
8274 {
8275 REAL_VALUE_TYPE cint;
8276 REAL_VALUE_TYPE c;
8277 HOST_WIDE_INT n;
8278
8279 c = TREE_REAL_CST (arg1);
8280
8281 /* Optimize pow(x,0.0) = 1.0. */
8282 if (REAL_VALUES_EQUAL (c, dconst0))
8283 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8284 arg0);
8285
8286 /* Optimize pow(x,1.0) = x. */
8287 if (REAL_VALUES_EQUAL (c, dconst1))
8288 return arg0;
8289
8290 /* Optimize pow(x,-1.0) = 1.0/x. */
8291 if (REAL_VALUES_EQUAL (c, dconstm1))
8292 return fold_build2_loc (loc, RDIV_EXPR, type,
8293 build_real (type, dconst1), arg0);
8294
8295 /* Optimize pow(x,0.5) = sqrt(x). */
8296 if (flag_unsafe_math_optimizations
8297 && REAL_VALUES_EQUAL (c, dconsthalf))
8298 {
8299 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8300
8301 if (sqrtfn != NULL_TREE)
8302 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8303 }
8304
8305 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8306 if (flag_unsafe_math_optimizations)
8307 {
8308 const REAL_VALUE_TYPE dconstroot
8309 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8310
8311 if (REAL_VALUES_EQUAL (c, dconstroot))
8312 {
8313 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8314 if (cbrtfn != NULL_TREE)
8315 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8316 }
8317 }
8318
8319 /* Check for an integer exponent. */
8320 n = real_to_integer (&c);
8321 real_from_integer (&cint, VOIDmode, n, SIGNED);
8322 if (real_identical (&c, &cint))
8323 {
8324 /* Attempt to evaluate pow at compile-time, unless this should
8325 raise an exception. */
8326 if (TREE_CODE (arg0) == REAL_CST
8327 && !TREE_OVERFLOW (arg0)
8328 && (n > 0
8329 || (!flag_trapping_math && !flag_errno_math)
8330 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8331 {
8332 REAL_VALUE_TYPE x;
8333 bool inexact;
8334
8335 x = TREE_REAL_CST (arg0);
8336 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8337 if (flag_unsafe_math_optimizations || !inexact)
8338 return build_real (type, x);
8339 }
8340
8341 /* Strip sign ops from even integer powers. */
8342 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8343 {
8344 tree narg0 = fold_strip_sign_ops (arg0);
8345 if (narg0)
8346 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8347 }
8348 }
8349 }
8350
8351 if (flag_unsafe_math_optimizations)
8352 {
8353 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8354
8355 /* Optimize pow(expN(x),y) = expN(x*y). */
8356 if (BUILTIN_EXPONENT_P (fcode))
8357 {
8358 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8359 tree arg = CALL_EXPR_ARG (arg0, 0);
8360 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8361 return build_call_expr_loc (loc, expfn, 1, arg);
8362 }
8363
8364 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8365 if (BUILTIN_SQRT_P (fcode))
8366 {
8367 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8368 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8369 build_real (type, dconsthalf));
8370 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8371 }
8372
8373 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8374 if (BUILTIN_CBRT_P (fcode))
8375 {
8376 tree arg = CALL_EXPR_ARG (arg0, 0);
8377 if (tree_expr_nonnegative_p (arg))
8378 {
8379 const REAL_VALUE_TYPE dconstroot
8380 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8381 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8382 build_real (type, dconstroot));
8383 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8384 }
8385 }
8386
8387 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8388 if (fcode == BUILT_IN_POW
8389 || fcode == BUILT_IN_POWF
8390 || fcode == BUILT_IN_POWL)
8391 {
8392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8393 if (tree_expr_nonnegative_p (arg00))
8394 {
8395 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8396 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8397 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8398 }
8399 }
8400 }
8401
8402 return NULL_TREE;
8403 }
8404
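/* E.g. pow (x, 1.0) folds to x, pow (2.0, 3.0) is evaluated to 8.0 at
 compile time, and with -funsafe-math-optimizations pow (x, 0.5)
 becomes sqrt (x). */
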
8405 /* Fold a builtin function call to powi, powif, or powil with arguments
8406 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8407 static tree
8408 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8409 tree arg0, tree arg1, tree type)
8410 {
8411 if (!validate_arg (arg0, REAL_TYPE)
8412 || !validate_arg (arg1, INTEGER_TYPE))
8413 return NULL_TREE;
8414
8415 /* Optimize powi(1.0,y) = 1.0. */
8416 if (real_onep (arg0))
8417 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8418
8419 if (tree_fits_shwi_p (arg1))
8420 {
8421 HOST_WIDE_INT c = tree_to_shwi (arg1);
8422
8423 /* Evaluate powi at compile-time. */
8424 if (TREE_CODE (arg0) == REAL_CST
8425 && !TREE_OVERFLOW (arg0))
8426 {
8427 REAL_VALUE_TYPE x;
8428 x = TREE_REAL_CST (arg0);
8429 real_powi (&x, TYPE_MODE (type), &x, c);
8430 return build_real (type, x);
8431 }
8432
8433 /* Optimize powi(x,0) = 1.0. */
8434 if (c == 0)
8435 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8436 arg0);
8437
8438 /* Optimize powi(x,1) = x. */
8439 if (c == 1)
8440 return arg0;
8441
8442 /* Optimize powi(x,-1) = 1.0/x. */
8443 if (c == -1)
8444 return fold_build2_loc (loc, RDIV_EXPR, type,
8445 build_real (type, dconst1), arg0);
8446 }
8447
8448 return NULL_TREE;
8449 }
8450
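/* E.g. __builtin_powi (2.0, 10) folds to 1024.0 and
 __builtin_powi (x, -1) folds to 1.0 / x. */
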
8451 /* A subroutine of fold_builtin to fold the various exponent
8452 functions. Return NULL_TREE if no simplification can be made.
8453 FUNC is the corresponding MPFR exponent function. */
8454
8455 static tree
8456 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8457 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8458 {
8459 if (validate_arg (arg, REAL_TYPE))
8460 {
8461 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8462 tree res;
8463
8464 /* Calculate the result when the argument is a constant. */
8465 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8466 return res;
8467
8468 /* Optimize expN(logN(x)) = x. */
8469 if (flag_unsafe_math_optimizations)
8470 {
8471 const enum built_in_function fcode = builtin_mathfn_code (arg);
8472
8473 if ((func == mpfr_exp
8474 && (fcode == BUILT_IN_LOG
8475 || fcode == BUILT_IN_LOGF
8476 || fcode == BUILT_IN_LOGL))
8477 || (func == mpfr_exp2
8478 && (fcode == BUILT_IN_LOG2
8479 || fcode == BUILT_IN_LOG2F
8480 || fcode == BUILT_IN_LOG2L))
8481 || (func == mpfr_exp10
8482 && (fcode == BUILT_IN_LOG10
8483 || fcode == BUILT_IN_LOG10F
8484 || fcode == BUILT_IN_LOG10L)))
8485 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8486 }
8487 }
8488
8489 return NULL_TREE;
8490 }
8491
8492 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8493 Return NULL_TREE if no simplification can be made. */
8494
8495 static tree
8496 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8497 {
8498 tree fn, len, lenp1, call, type;
8499
8500 if (!validate_arg (dest, POINTER_TYPE)
8501 || !validate_arg (src, POINTER_TYPE))
8502 return NULL_TREE;
8503
8504 len = c_strlen (src, 1);
8505 if (!len
8506 || TREE_CODE (len) != INTEGER_CST)
8507 return NULL_TREE;
8508
8509 if (optimize_function_for_size_p (cfun)
8510 /* If length is zero it's small enough. */
8511 && !integer_zerop (len))
8512 return NULL_TREE;
8513
8514 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8515 if (!fn)
8516 return NULL_TREE;
8517
8518 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8519 fold_convert_loc (loc, size_type_node, len),
8520 build_int_cst (size_type_node, 1));
8521 /* We use dest twice in building our expression. Save it from
8522 multiple expansions. */
8523 dest = builtin_save_expr (dest);
8524 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8525
8526 type = TREE_TYPE (TREE_TYPE (fndecl));
8527 dest = fold_build_pointer_plus_loc (loc, dest, len);
8528 dest = fold_convert_loc (loc, type, dest);
8529 dest = omit_one_operand_loc (loc, type, dest, call);
8530 return dest;
8531 }
8532
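/* E.g. when not optimizing for size, stpcpy (d, "abc") becomes
 roughly (memcpy (d, "abc", 4), d + 3), copying the terminating nul
 and yielding a pointer to the copied nul byte. */
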
8533 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8534 arguments to the call, and TYPE is its return type.
8535 Return NULL_TREE if no simplification can be made. */
8536
8537 static tree
8538 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8539 {
8540 if (!validate_arg (arg1, POINTER_TYPE)
8541 || !validate_arg (arg2, INTEGER_TYPE)
8542 || !validate_arg (len, INTEGER_TYPE))
8543 return NULL_TREE;
8544 else
8545 {
8546 const char *p1;
8547
8548 if (TREE_CODE (arg2) != INTEGER_CST
8549 || !tree_fits_uhwi_p (len))
8550 return NULL_TREE;
8551
8552 p1 = c_getstr (arg1);
8553 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8554 {
8555 char c;
8556 const char *r;
8557 tree tem;
8558
8559 if (target_char_cast (arg2, &c))
8560 return NULL_TREE;
8561
8562 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8563
8564 if (r == NULL)
8565 return build_int_cst (TREE_TYPE (arg1), 0);
8566
8567 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8568 return fold_convert_loc (loc, type, tem);
8569 }
8570 return NULL_TREE;
8571 }
8572 }
8573
8574 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8575 Return NULL_TREE if no simplification can be made. */
8576
8577 static tree
8578 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8579 {
8580 const char *p1, *p2;
8581
8582 if (!validate_arg (arg1, POINTER_TYPE)
8583 || !validate_arg (arg2, POINTER_TYPE)
8584 || !validate_arg (len, INTEGER_TYPE))
8585 return NULL_TREE;
8586
8587 /* If the LEN parameter is zero, return zero. */
8588 if (integer_zerop (len))
8589 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8590 arg1, arg2);
8591
8592 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8593 if (operand_equal_p (arg1, arg2, 0))
8594 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8595
8596 p1 = c_getstr (arg1);
8597 p2 = c_getstr (arg2);
8598
8599 /* If all arguments are constant, and the value of len is not greater
8600 than the lengths of arg1 and arg2, evaluate at compile-time. */
8601 if (tree_fits_uhwi_p (len) && p1 && p2
8602 && compare_tree_int (len, strlen (p1) + 1) <= 0
8603 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8604 {
8605 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8606
8607 if (r > 0)
8608 return integer_one_node;
8609 else if (r < 0)
8610 return integer_minus_one_node;
8611 else
8612 return integer_zero_node;
8613 }
8614
8615 /* If len parameter is one, return an expression corresponding to
8616 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8617 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8618 {
8619 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8620 tree cst_uchar_ptr_node
8621 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8622
8623 tree ind1
8624 = fold_convert_loc (loc, integer_type_node,
8625 build1 (INDIRECT_REF, cst_uchar_node,
8626 fold_convert_loc (loc,
8627 cst_uchar_ptr_node,
8628 arg1)));
8629 tree ind2
8630 = fold_convert_loc (loc, integer_type_node,
8631 build1 (INDIRECT_REF, cst_uchar_node,
8632 fold_convert_loc (loc,
8633 cst_uchar_ptr_node,
8634 arg2)));
8635 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8636 }
8637
8638 return NULL_TREE;
8639 }
8640
8641 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8642 Return NULL_TREE if no simplification can be made. */
8643
8644 static tree
8645 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8646 {
8647 const char *p1, *p2;
8648
8649 if (!validate_arg (arg1, POINTER_TYPE)
8650 || !validate_arg (arg2, POINTER_TYPE))
8651 return NULL_TREE;
8652
8653 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8654 if (operand_equal_p (arg1, arg2, 0))
8655 return integer_zero_node;
8656
8657 p1 = c_getstr (arg1);
8658 p2 = c_getstr (arg2);
8659
8660 if (p1 && p2)
8661 {
8662 const int i = strcmp (p1, p2);
8663 if (i < 0)
8664 return integer_minus_one_node;
8665 else if (i > 0)
8666 return integer_one_node;
8667 else
8668 return integer_zero_node;
8669 }
8670
8671 /* If the second arg is "", return *(const unsigned char*)arg1. */
8672 if (p2 && *p2 == '\0')
8673 {
8674 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8675 tree cst_uchar_ptr_node
8676 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8677
8678 return fold_convert_loc (loc, integer_type_node,
8679 build1 (INDIRECT_REF, cst_uchar_node,
8680 fold_convert_loc (loc,
8681 cst_uchar_ptr_node,
8682 arg1)));
8683 }
8684
8685 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8686 if (p1 && *p1 == '\0')
8687 {
8688 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8689 tree cst_uchar_ptr_node
8690 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8691
8692 tree temp
8693 = fold_convert_loc (loc, integer_type_node,
8694 build1 (INDIRECT_REF, cst_uchar_node,
8695 fold_convert_loc (loc,
8696 cst_uchar_ptr_node,
8697 arg2)));
8698 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8699 }
8700
8701 return NULL_TREE;
8702 }
8703
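/* Illustrative examples (editorial addition, not in the original source):
   with the folds above,

     __builtin_strcmp (s, s)       becomes 0,
     __builtin_strcmp ("ab", "ac") becomes -1 at compile time, and
     __builtin_strcmp (s, "")      becomes *(const unsigned char *) s,

   the last because comparing against the empty string reduces to testing
   the first byte of S.  */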
8704 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8705 Return NULL_TREE if no simplification can be made. */
8706
8707 static tree
8708 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8709 {
8710 const char *p1, *p2;
8711
8712 if (!validate_arg (arg1, POINTER_TYPE)
8713 || !validate_arg (arg2, POINTER_TYPE)
8714 || !validate_arg (len, INTEGER_TYPE))
8715 return NULL_TREE;
8716
8717 /* If the LEN parameter is zero, return zero. */
8718 if (integer_zerop (len))
8719 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8720 arg1, arg2);
8721
8722 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8723 if (operand_equal_p (arg1, arg2, 0))
8724 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8725
8726 p1 = c_getstr (arg1);
8727 p2 = c_getstr (arg2);
8728
8729 if (tree_fits_uhwi_p (len) && p1 && p2)
8730 {
8731 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8732 if (i > 0)
8733 return integer_one_node;
8734 else if (i < 0)
8735 return integer_minus_one_node;
8736 else
8737 return integer_zero_node;
8738 }
8739
8740 /* If the second arg is "", and the length is greater than zero,
8741 return *(const unsigned char*)arg1. */
8742 if (p2 && *p2 == '\0'
8743 && TREE_CODE (len) == INTEGER_CST
8744 && tree_int_cst_sgn (len) == 1)
8745 {
8746 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8747 tree cst_uchar_ptr_node
8748 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8749
8750 return fold_convert_loc (loc, integer_type_node,
8751 build1 (INDIRECT_REF, cst_uchar_node,
8752 fold_convert_loc (loc,
8753 cst_uchar_ptr_node,
8754 arg1)));
8755 }
8756
8757 /* If the first arg is "", and the length is greater than zero,
8758 return -*(const unsigned char*)arg2. */
8759 if (p1 && *p1 == '\0'
8760 && TREE_CODE (len) == INTEGER_CST
8761 && tree_int_cst_sgn (len) == 1)
8762 {
8763 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8764 tree cst_uchar_ptr_node
8765 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8766
8767 tree temp = fold_convert_loc (loc, integer_type_node,
8768 build1 (INDIRECT_REF, cst_uchar_node,
8769 fold_convert_loc (loc,
8770 cst_uchar_ptr_node,
8771 arg2)));
8772 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8773 }
8774
8775 /* If the LEN parameter is one, return an expression corresponding to
8776 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8777 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8778 {
8779 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8780 tree cst_uchar_ptr_node
8781 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8782
8783 tree ind1 = fold_convert_loc (loc, integer_type_node,
8784 build1 (INDIRECT_REF, cst_uchar_node,
8785 fold_convert_loc (loc,
8786 cst_uchar_ptr_node,
8787 arg1)));
8788 tree ind2 = fold_convert_loc (loc, integer_type_node,
8789 build1 (INDIRECT_REF, cst_uchar_node,
8790 fold_convert_loc (loc,
8791 cst_uchar_ptr_node,
8792 arg2)));
8793 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8794 }
8795
8796 return NULL_TREE;
8797 }
8798
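/* Illustrative examples (editorial addition, not in the original source):
   the folder above gives

     __builtin_strncmp (p, q, 0)       -> 0 (P and Q still evaluated),
     __builtin_strncmp ("ab", "ac", 1) -> 0 at compile time,
     __builtin_strncmp (s, "", 4)      -> *(const unsigned char *) s, and
     __builtin_strncmp (p, q, 1)       ->
       *(const unsigned char *) p - *(const unsigned char *) q.  */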
8799 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8800 ARG. Return NULL_TREE if no simplification can be made. */
8801
8802 static tree
8803 fold_builtin_signbit (location_t loc, tree arg, tree type)
8804 {
8805 if (!validate_arg (arg, REAL_TYPE))
8806 return NULL_TREE;
8807
8808 /* If ARG is a compile-time constant, determine the result. */
8809 if (TREE_CODE (arg) == REAL_CST
8810 && !TREE_OVERFLOW (arg))
8811 {
8812 REAL_VALUE_TYPE c;
8813
8814 c = TREE_REAL_CST (arg);
8815 return (REAL_VALUE_NEGATIVE (c)
8816 ? build_one_cst (type)
8817 : build_zero_cst (type));
8818 }
8819
8820 /* If ARG is non-negative, the result is always zero. */
8821 if (tree_expr_nonnegative_p (arg))
8822 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8823
8824 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8825 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8826 return fold_convert (type,
8827 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8828 build_real (TREE_TYPE (arg), dconst0)));
8829
8830 return NULL_TREE;
8831 }
8832
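/* Illustrative examples (editorial addition, not in the original source):
   the cases above fold __builtin_signbit (-2.5) to 1 and
   __builtin_signbit (2.5) to 0 as compile-time constants, fold
   __builtin_signbit (fabs (x)) to 0 because the argument is known
   non-negative, and, for formats without signed zeros, rewrite
   __builtin_signbit (x) as x < 0.0.  */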
8833 /* Fold function call to builtin copysign, copysignf or copysignl with
8834 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8835 be made. */
8836
8837 static tree
8838 fold_builtin_copysign (location_t loc, tree fndecl,
8839 tree arg1, tree arg2, tree type)
8840 {
8841 tree tem;
8842
8843 if (!validate_arg (arg1, REAL_TYPE)
8844 || !validate_arg (arg2, REAL_TYPE))
8845 return NULL_TREE;
8846
8847 /* copysign(X,X) is X. */
8848 if (operand_equal_p (arg1, arg2, 0))
8849 return fold_convert_loc (loc, type, arg1);
8850
8851 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8852 if (TREE_CODE (arg1) == REAL_CST
8853 && TREE_CODE (arg2) == REAL_CST
8854 && !TREE_OVERFLOW (arg1)
8855 && !TREE_OVERFLOW (arg2))
8856 {
8857 REAL_VALUE_TYPE c1, c2;
8858
8859 c1 = TREE_REAL_CST (arg1);
8860 c2 = TREE_REAL_CST (arg2);
8861 /* c1.sign := c2.sign. */
8862 real_copysign (&c1, &c2);
8863 return build_real (type, c1);
8864 }
8865
8866 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8867 Remember to evaluate Y for side-effects. */
8868 if (tree_expr_nonnegative_p (arg2))
8869 return omit_one_operand_loc (loc, type,
8870 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8871 arg2);
8872
8873 /* Strip sign changing operations for the first argument. */
8874 tem = fold_strip_sign_ops (arg1);
8875 if (tem)
8876 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8877
8878 return NULL_TREE;
8879 }
8880
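/* Illustrative examples (editorial addition, not in the original source):
   the folds above give

     copysign (x, x)      -> x,
     copysign (-3.0, 2.0) -> 3.0 (constants: the sign of 2.0 is copied),
     copysign (x, 5.0)    -> fabs (x) (second argument non-negative,
                             and still evaluated for side effects).  */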
8881 /* Fold a call to builtin isascii with argument ARG. */
8882
8883 static tree
8884 fold_builtin_isascii (location_t loc, tree arg)
8885 {
8886 if (!validate_arg (arg, INTEGER_TYPE))
8887 return NULL_TREE;
8888 else
8889 {
8890 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8891 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8892 build_int_cst (integer_type_node,
8893 ~ (unsigned HOST_WIDE_INT) 0x7f));
8894 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8895 arg, integer_zero_node);
8896 }
8897 }
8898
8899 /* Fold a call to builtin toascii with argument ARG. */
8900
8901 static tree
8902 fold_builtin_toascii (location_t loc, tree arg)
8903 {
8904 if (!validate_arg (arg, INTEGER_TYPE))
8905 return NULL_TREE;
8906
8907 /* Transform toascii(c) -> (c & 0x7f). */
8908 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8909 build_int_cst (integer_type_node, 0x7f));
8910 }
8911
8912 /* Fold a call to builtin isdigit with argument ARG. */
8913
8914 static tree
8915 fold_builtin_isdigit (location_t loc, tree arg)
8916 {
8917 if (!validate_arg (arg, INTEGER_TYPE))
8918 return NULL_TREE;
8919 else
8920 {
8921 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8922 /* According to the C standard, isdigit is unaffected by locale.
8923 However, it definitely is affected by the target character set. */
8924 unsigned HOST_WIDE_INT target_digit0
8925 = lang_hooks.to_target_charset ('0');
8926
8927 if (target_digit0 == 0)
8928 return NULL_TREE;
8929
8930 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8931 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8932 build_int_cst (unsigned_type_node, target_digit0));
8933 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8934 build_int_cst (unsigned_type_node, 9));
8935 }
8936 }
8937
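/* Illustrative example (editorial addition, not in the original source):
   on a target whose character set places '0' at 48, e.g. ASCII, the fold
   above turns __builtin_isdigit (c) into the branch-free test

     (unsigned) c - 48 <= 9

   which is valid because the C standard guarantees the decimal digits
   are contiguous in the execution character set.  */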
8938 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8939
8940 static tree
8941 fold_builtin_fabs (location_t loc, tree arg, tree type)
8942 {
8943 if (!validate_arg (arg, REAL_TYPE))
8944 return NULL_TREE;
8945
8946 arg = fold_convert_loc (loc, type, arg);
8947 if (TREE_CODE (arg) == REAL_CST)
8948 return fold_abs_const (arg, type);
8949 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8950 }
8951
8952 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8953
8954 static tree
8955 fold_builtin_abs (location_t loc, tree arg, tree type)
8956 {
8957 if (!validate_arg (arg, INTEGER_TYPE))
8958 return NULL_TREE;
8959
8960 arg = fold_convert_loc (loc, type, arg);
8961 if (TREE_CODE (arg) == INTEGER_CST)
8962 return fold_abs_const (arg, type);
8963 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8964 }
8965
8966 /* Fold a fma operation with arguments ARG[012]. */
8967
8968 tree
8969 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8970 tree type, tree arg0, tree arg1, tree arg2)
8971 {
8972 if (TREE_CODE (arg0) == REAL_CST
8973 && TREE_CODE (arg1) == REAL_CST
8974 && TREE_CODE (arg2) == REAL_CST)
8975 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8976
8977 return NULL_TREE;
8978 }
8979
8980 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8981
8982 static tree
8983 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8984 {
8985 if (validate_arg (arg0, REAL_TYPE)
8986 && validate_arg (arg1, REAL_TYPE)
8987 && validate_arg (arg2, REAL_TYPE))
8988 {
8989 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8990 if (tem)
8991 return tem;
8992
8993 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8994 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8995 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8996 }
8997 return NULL_TREE;
8998 }
8999
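/* Illustrative example (editorial addition, not in the original source):
   with all-constant arguments the MPFR path above folds, e.g.,
   __builtin_fma (2.0, 3.0, 1.0) to 7.0, computed with a single rounding;
   otherwise an FMA_EXPR is emitted only when the target implements the
   fma optab, so no libcall is introduced behind the user's back.  */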
9000 /* Fold a call to builtin fmin or fmax. */
9001
9002 static tree
9003 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9004 tree type, bool max)
9005 {
9006 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9007 {
9008 /* Calculate the result when the argument is a constant. */
9009 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9010
9011 if (res)
9012 return res;
9013
9014 /* If either argument is NaN, return the other one. Avoid the
9015 transformation if we get (and honor) a signalling NaN. Using
9016 omit_one_operand() ensures we create a non-lvalue. */
9017 if (TREE_CODE (arg0) == REAL_CST
9018 && real_isnan (&TREE_REAL_CST (arg0))
9019 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9020 || ! TREE_REAL_CST (arg0).signalling))
9021 return omit_one_operand_loc (loc, type, arg1, arg0);
9022 if (TREE_CODE (arg1) == REAL_CST
9023 && real_isnan (&TREE_REAL_CST (arg1))
9024 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9025 || ! TREE_REAL_CST (arg1).signalling))
9026 return omit_one_operand_loc (loc, type, arg0, arg1);
9027
9028 /* Transform fmin/fmax(x,x) -> x. */
9029 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9030 return omit_one_operand_loc (loc, type, arg0, arg1);
9031
9032 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9033 functions to return the numeric arg if the other one is NaN.
9034 These tree codes don't honor that, so only transform if
9035 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9036 handled, so we don't have to worry about it either. */
9037 if (flag_finite_math_only)
9038 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9039 fold_convert_loc (loc, type, arg0),
9040 fold_convert_loc (loc, type, arg1));
9041 }
9042 return NULL_TREE;
9043 }
9044
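/* Illustrative examples (editorial addition, not in the original source):
   the cases above fold fmax (x, x) to x and fmin (x, __builtin_nan (""))
   to x, since C99 requires the numeric operand to win against a quiet
   NaN; only under -ffinite-math-only is fmax (a, b) lowered to
   MAX_EXPR <a, b>, because MAX_EXPR does not honor that NaN rule.  */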
9045 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9046
9047 static tree
9048 fold_builtin_carg (location_t loc, tree arg, tree type)
9049 {
9050 if (validate_arg (arg, COMPLEX_TYPE)
9051 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9052 {
9053 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9054
9055 if (atan2_fn)
9056 {
9057 tree new_arg = builtin_save_expr (arg);
9058 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9059 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9060 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9061 }
9062 }
9063
9064 return NULL_TREE;
9065 }
9066
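/* Illustrative example (editorial addition, not in the original source):
   for a complex double Z, the fold above rewrites __builtin_carg (z) as

     atan2 (__imag__ z, __real__ z)

   with Z wrapped via builtin_save_expr so its side effects run once.  */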
9067 /* Fold a call to builtin logb/ilogb. */
9068
9069 static tree
9070 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9071 {
9072 if (! validate_arg (arg, REAL_TYPE))
9073 return NULL_TREE;
9074
9075 STRIP_NOPS (arg);
9076
9077 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9078 {
9079 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9080
9081 switch (value->cl)
9082 {
9083 case rvc_nan:
9084 case rvc_inf:
9085 /* If arg is Inf or NaN and we're logb, return it. */
9086 if (TREE_CODE (rettype) == REAL_TYPE)
9087 {
9088 /* For logb(-Inf) we have to return +Inf. */
9089 if (real_isinf (value) && real_isneg (value))
9090 {
9091 REAL_VALUE_TYPE tem;
9092 real_inf (&tem);
9093 return build_real (rettype, tem);
9094 }
9095 return fold_convert_loc (loc, rettype, arg);
9096 }
9097 /* Fall through... */
9098 case rvc_zero:
9099 /* Zero may set errno and/or raise an exception for logb; for
9100 ilogb we don't know the target's FP_ILOGB0 value. */
9101 return NULL_TREE;
9102 case rvc_normal:
9103 /* For normal numbers, proceed iff radix == 2. In GCC,
9104 normalized significands are in the range [0.5, 1.0). We
9105 want the exponent as if they were [1.0, 2.0) so get the
9106 exponent and subtract 1. */
9107 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9108 return fold_convert_loc (loc, rettype,
9109 build_int_cst (integer_type_node,
9110 REAL_EXP (value)-1));
9111 break;
9112 }
9113 }
9114
9115 return NULL_TREE;
9116 }
9117
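/* Illustrative examples (editorial addition, not in the original source):
   with a constant argument the code above folds logb (8.0) to 3.0, since
   8.0 is 0.5 * 2**4 in GCC's normalization and REAL_EXP 4 minus 1 gives
   3; logb (-__builtin_inf ()) folds to +Inf; logb (0.0) and ilogb (0.0)
   are left to the library because of errno and FP_ILOGB0.  */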
9118 /* Fold a call to builtin significand, if radix == 2. */
9119
9120 static tree
9121 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9122 {
9123 if (! validate_arg (arg, REAL_TYPE))
9124 return NULL_TREE;
9125
9126 STRIP_NOPS (arg);
9127
9128 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9129 {
9130 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9131
9132 switch (value->cl)
9133 {
9134 case rvc_zero:
9135 case rvc_nan:
9136 case rvc_inf:
9137 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9138 return fold_convert_loc (loc, rettype, arg);
9139 case rvc_normal:
9140 /* For normal numbers, proceed iff radix == 2. */
9141 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9142 {
9143 REAL_VALUE_TYPE result = *value;
9144 /* In GCC, normalized significands are in the range [0.5,
9145 1.0). We want them to be [1.0, 2.0) so set the
9146 exponent to 1. */
9147 SET_REAL_EXP (&result, 1);
9148 return build_real (rettype, result);
9149 }
9150 break;
9151 }
9152 }
9153
9154 return NULL_TREE;
9155 }
9156
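/* Illustrative example (editorial addition, not in the original source):
   for a radix-2 constant the code above folds significand (12.0) to 1.5:
   the stored fraction 0.75 is rescaled from GCC's [0.5, 1.0)
   normalization into the conventional [1.0, 2.0) range.  */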
9157 /* Fold a call to builtin frexp, we can assume the base is 2. */
9158
9159 static tree
9160 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9161 {
9162 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9163 return NULL_TREE;
9164
9165 STRIP_NOPS (arg0);
9166
9167 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9168 return NULL_TREE;
9169
9170 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9171
9172 /* Proceed if a valid pointer type was passed in. */
9173 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9174 {
9175 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9176 tree frac, exp;
9177
9178 switch (value->cl)
9179 {
9180 case rvc_zero:
9181 /* For +-0, return (*exp = 0, +-0). */
9182 exp = integer_zero_node;
9183 frac = arg0;
9184 break;
9185 case rvc_nan:
9186 case rvc_inf:
9187 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9188 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9189 case rvc_normal:
9190 {
9191 /* Since the frexp function always expects base 2, and in
9192 GCC normalized significands are already in the range
9193 [0.5, 1.0), we have exactly what frexp wants. */
9194 REAL_VALUE_TYPE frac_rvt = *value;
9195 SET_REAL_EXP (&frac_rvt, 0);
9196 frac = build_real (rettype, frac_rvt);
9197 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9198 }
9199 break;
9200 default:
9201 gcc_unreachable ();
9202 }
9203
9204 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9205 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9206 TREE_SIDE_EFFECTS (arg1) = 1;
9207 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9208 }
9209
9210 return NULL_TREE;
9211 }
9212
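/* Illustrative example (editorial addition, not in the original source):
   for a constant argument the code above folds frexp (8.0, &e) into the
   compound expression (e = 4, 0.5), matching the identity
   8.0 == 0.5 * 2**4 with the fraction kept in [0.5, 1.0).  */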
9213 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9214 then we can assume the base is two. If it's false, then we have to
9215 check the mode of the TYPE parameter in certain cases. */
9216
9217 static tree
9218 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9219 tree type, bool ldexp)
9220 {
9221 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9222 {
9223 STRIP_NOPS (arg0);
9224 STRIP_NOPS (arg1);
9225
9226 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9227 if (real_zerop (arg0) || integer_zerop (arg1)
9228 || (TREE_CODE (arg0) == REAL_CST
9229 && !real_isfinite (&TREE_REAL_CST (arg0))))
9230 return omit_one_operand_loc (loc, type, arg0, arg1);
9231
9232 /* If both arguments are constant, then try to evaluate it. */
9233 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9234 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9235 && tree_fits_shwi_p (arg1))
9236 {
9237 /* Bound the maximum adjustment to twice the range of the
9238 mode's valid exponents. Use abs to ensure the range is
9239 positive as a sanity check. */
9240 const long max_exp_adj = 2 *
9241 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9242 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9243
9244 /* Get the user-requested adjustment. */
9245 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9246
9247 /* The requested adjustment must be inside this range. This
9248 is a preliminary cap to avoid things like overflow, we
9249 may still fail to compute the result for other reasons. */
9250 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9251 {
9252 REAL_VALUE_TYPE initial_result;
9253
9254 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9255
9256 /* Ensure we didn't overflow. */
9257 if (! real_isinf (&initial_result))
9258 {
9259 const REAL_VALUE_TYPE trunc_result
9260 = real_value_truncate (TYPE_MODE (type), initial_result);
9261
9262 /* Only proceed if the target mode can hold the
9263 resulting value. */
9264 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9265 return build_real (type, trunc_result);
9266 }
9267 }
9268 }
9269 }
9270
9271 return NULL_TREE;
9272 }
9273
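/* Illustrative examples (editorial addition, not in the original source):
   the code above folds ldexp (x, 0) to x and ldexp (3.0, 2) to 12.0;
   scalbn/scalbln are handled the same way, but their constant case only
   when the type's radix is 2, since those functions scale by
   FLT_RADIX.  */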
9274 /* Fold a call to builtin modf. */
9275
9276 static tree
9277 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9278 {
9279 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9280 return NULL_TREE;
9281
9282 STRIP_NOPS (arg0);
9283
9284 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9285 return NULL_TREE;
9286
9287 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9288
9289 /* Proceed if a valid pointer type was passed in. */
9290 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9291 {
9292 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9293 REAL_VALUE_TYPE trunc, frac;
9294
9295 switch (value->cl)
9296 {
9297 case rvc_nan:
9298 case rvc_zero:
9299 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9300 trunc = frac = *value;
9301 break;
9302 case rvc_inf:
9303 /* For +-Inf, return (*arg1 = arg0, +-0). */
9304 frac = dconst0;
9305 frac.sign = value->sign;
9306 trunc = *value;
9307 break;
9308 case rvc_normal:
9309 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9310 real_trunc (&trunc, VOIDmode, value);
9311 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9312 /* If the original number was negative and already
9313 integral, then the fractional part is -0.0. */
9314 if (value->sign && frac.cl == rvc_zero)
9315 frac.sign = value->sign;
9316 break;
9317 }
9318
9319 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9320 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9321 build_real (rettype, trunc));
9322 TREE_SIDE_EFFECTS (arg1) = 1;
9323 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9324 build_real (rettype, frac));
9325 }
9326
9327 return NULL_TREE;
9328 }
9329
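/* Illustrative examples (editorial addition, not in the original source):
   with a constant argument the code above folds

     modf (2.5, &i)   into (i = 2.0, 0.5), and
     modf (-2.0, &i)  into (i = -2.0, -0.0),

   the latter keeping the sign on the zero fractional part as the
   rvc_normal case requires.  */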
9330 /* Given a location LOC, an interclass builtin function decl FNDECL
9331 and its single argument ARG, return a folded expression computing
9332 the same, or NULL_TREE if we either couldn't or didn't want to fold
9333 (the latter happens if there's an RTL instruction available). */
9334
9335 static tree
9336 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9337 {
9338 enum machine_mode mode;
9339
9340 if (!validate_arg (arg, REAL_TYPE))
9341 return NULL_TREE;
9342
9343 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9344 return NULL_TREE;
9345
9346 mode = TYPE_MODE (TREE_TYPE (arg));
9347
9348 /* If there is no optab, try generic code. */
9349 switch (DECL_FUNCTION_CODE (fndecl))
9350 {
9351 tree result;
9352
9353 CASE_FLT_FN (BUILT_IN_ISINF):
9354 {
9355 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9356 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9357 tree const type = TREE_TYPE (arg);
9358 REAL_VALUE_TYPE r;
9359 char buf[128];
9360
9361 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9362 real_from_string (&r, buf);
9363 result = build_call_expr (isgr_fn, 2,
9364 fold_build1_loc (loc, ABS_EXPR, type, arg),
9365 build_real (type, r));
9366 return result;
9367 }
9368 CASE_FLT_FN (BUILT_IN_FINITE):
9369 case BUILT_IN_ISFINITE:
9370 {
9371 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9372 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9373 tree const type = TREE_TYPE (arg);
9374 REAL_VALUE_TYPE r;
9375 char buf[128];
9376
9377 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9378 real_from_string (&r, buf);
9379 result = build_call_expr (isle_fn, 2,
9380 fold_build1_loc (loc, ABS_EXPR, type, arg),
9381 build_real (type, r));
9382 /*result = fold_build2_loc (loc, UNGT_EXPR,
9383 TREE_TYPE (TREE_TYPE (fndecl)),
9384 fold_build1_loc (loc, ABS_EXPR, type, arg),
9385 build_real (type, r));
9386 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9387 TREE_TYPE (TREE_TYPE (fndecl)),
9388 result);*/
9389 return result;
9390 }
9391 case BUILT_IN_ISNORMAL:
9392 {
9393 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9394 islessequal(fabs(x),DBL_MAX). */
9395 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9396 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9397 tree const type = TREE_TYPE (arg);
9398 REAL_VALUE_TYPE rmax, rmin;
9399 char buf[128];
9400
9401 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9402 real_from_string (&rmax, buf);
9403 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9404 real_from_string (&rmin, buf);
9405 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9406 result = build_call_expr (isle_fn, 2, arg,
9407 build_real (type, rmax));
9408 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9409 build_call_expr (isge_fn, 2, arg,
9410 build_real (type, rmin)));
9411 return result;
9412 }
9413 default:
9414 break;
9415 }
9416
9417 return NULL_TREE;
9418 }
9419
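/* Illustrative example (editorial addition, not in the original source):
   when no direct optab exists, the generic expansions above rewrite, for
   a double X,

     isinf (x)     as isgreater (fabs (x), MAX), and
     isfinite (x)  as islessequal (fabs (x), MAX),

   where MAX is the largest finite value of X's mode as printed by
   get_max_float.  */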
9420 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9421 ARG is the argument for the call. */
9422
9423 static tree
9424 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9425 {
9426 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9427 REAL_VALUE_TYPE r;
9428
9429 if (!validate_arg (arg, REAL_TYPE))
9430 return NULL_TREE;
9431
9432 switch (builtin_index)
9433 {
9434 case BUILT_IN_ISINF:
9435 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9436 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9437
9438 if (TREE_CODE (arg) == REAL_CST)
9439 {
9440 r = TREE_REAL_CST (arg);
9441 if (real_isinf (&r))
9442 return real_compare (GT_EXPR, &r, &dconst0)
9443 ? integer_one_node : integer_minus_one_node;
9444 else
9445 return integer_zero_node;
9446 }
9447
9448 return NULL_TREE;
9449
9450 case BUILT_IN_ISINF_SIGN:
9451 {
9452 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9453 /* In a boolean context, GCC will fold the inner COND_EXPR to
9454 1. So e.g. "if (isinf_sign(x))" would be folded to just
9455 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9456 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9457 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9458 tree tmp = NULL_TREE;
9459
9460 arg = builtin_save_expr (arg);
9461
9462 if (signbit_fn && isinf_fn)
9463 {
9464 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9465 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9466
9467 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9468 signbit_call, integer_zero_node);
9469 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9470 isinf_call, integer_zero_node);
9471
9472 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9473 integer_minus_one_node, integer_one_node);
9474 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9475 isinf_call, tmp,
9476 integer_zero_node);
9477 }
9478
9479 return tmp;
9480 }
9481
9482 case BUILT_IN_ISFINITE:
9483 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9484 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9485 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9486
9487 if (TREE_CODE (arg) == REAL_CST)
9488 {
9489 r = TREE_REAL_CST (arg);
9490 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9491 }
9492
9493 return NULL_TREE;
9494
9495 case BUILT_IN_ISNAN:
9496 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9497 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9498
9499 if (TREE_CODE (arg) == REAL_CST)
9500 {
9501 r = TREE_REAL_CST (arg);
9502 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9503 }
9504
9505 arg = builtin_save_expr (arg);
9506 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9507
9508 default:
9509 gcc_unreachable ();
9510 }
9511 }
9512
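/* Illustrative examples (editorial addition, not in the original source):
   the classifier above folds isnan (x) into the self-comparison
   x UNORDERED x (with X saved so it is evaluated once), folds
   isinf (3.0) to 0 outright, and collapses isinf/isnan to constant 0
   when the mode honors no infinities/NaNs at all.  */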
9513 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9514 This builtin will generate code to return the appropriate floating
9515 point classification depending on the value of the floating point
9516 number passed in. The possible return values must be supplied as
9517 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9518 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9519 one floating point argument, which is "type generic". */
9520
9521 static tree
9522 fold_builtin_fpclassify (location_t loc, tree exp)
9523 {
9524 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9525 arg, type, res, tmp;
9526 enum machine_mode mode;
9527 REAL_VALUE_TYPE r;
9528 char buf[128];
9529
9530 /* Verify the required arguments in the original call. */
9531 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9532 INTEGER_TYPE, INTEGER_TYPE,
9533 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9534 return NULL_TREE;
9535
9536 fp_nan = CALL_EXPR_ARG (exp, 0);
9537 fp_infinite = CALL_EXPR_ARG (exp, 1);
9538 fp_normal = CALL_EXPR_ARG (exp, 2);
9539 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9540 fp_zero = CALL_EXPR_ARG (exp, 4);
9541 arg = CALL_EXPR_ARG (exp, 5);
9542 type = TREE_TYPE (arg);
9543 mode = TYPE_MODE (type);
9544 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9545
9546 /* fpclassify(x) ->
9547 isnan(x) ? FP_NAN :
9548 (fabs(x) == Inf ? FP_INFINITE :
9549 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9550 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9551
9552 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9553 build_real (type, dconst0));
9554 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9555 tmp, fp_zero, fp_subnormal);
9556
9557 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9558 real_from_string (&r, buf);
9559 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9560 arg, build_real (type, r));
9561 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9562
9563 if (HONOR_INFINITIES (mode))
9564 {
9565 real_inf (&r);
9566 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9567 build_real (type, r));
9568 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9569 fp_infinite, res);
9570 }
9571
9572 if (HONOR_NANS (mode))
9573 {
9574 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9575 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9576 }
9577
9578 return res;
9579 }
9580
9581 /* Fold a call to an unordered comparison function such as
9582 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9583 being called and ARG0 and ARG1 are the arguments for the call.
9584 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9585 the opposite of the desired result. UNORDERED_CODE is used
9586 for modes that can hold NaNs and ORDERED_CODE is used for
9587 the rest. */
9588
9589 static tree
9590 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9591 enum tree_code unordered_code,
9592 enum tree_code ordered_code)
9593 {
9594 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9595 enum tree_code code;
9596 tree type0, type1;
9597 enum tree_code code0, code1;
9598 tree cmp_type = NULL_TREE;
9599
9600 type0 = TREE_TYPE (arg0);
9601 type1 = TREE_TYPE (arg1);
9602
9603 code0 = TREE_CODE (type0);
9604 code1 = TREE_CODE (type1);
9605
9606 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9607 /* Choose the wider of two real types. */
9608 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9609 ? type0 : type1;
9610 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9611 cmp_type = type0;
9612 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9613 cmp_type = type1;
9614
9615 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9616 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9617
9618 if (unordered_code == UNORDERED_EXPR)
9619 {
9620 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9621 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9622 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9623 }
9624
9625 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9626 : ordered_code;
9627 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9628 fold_build2_loc (loc, code, type, arg0, arg1));
9629 }
9630
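/* Illustrative example (editorial addition, not in the original source):
   for doubles the fold above turns __builtin_isgreater (x, y) into
   !(x UNLE y), which is quietly false when either operand is NaN; if
   the mode cannot hold NaNs it degenerates to plain !(x <= y).  */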
9631 /* Fold a call to built-in function FNDECL with 0 arguments.
9632 IGNORE is true if the result of the function call is ignored. This
9633 function returns NULL_TREE if no simplification was possible. */
9634
9635 static tree
9636 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9637 {
9638 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9639 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9640 switch (fcode)
9641 {
9642 CASE_FLT_FN (BUILT_IN_INF):
9643 case BUILT_IN_INFD32:
9644 case BUILT_IN_INFD64:
9645 case BUILT_IN_INFD128:
9646 return fold_builtin_inf (loc, type, true);
9647
9648 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9649 return fold_builtin_inf (loc, type, false);
9650
9651 case BUILT_IN_CLASSIFY_TYPE:
9652 return fold_builtin_classify_type (NULL_TREE);
9653
9654 case BUILT_IN_UNREACHABLE:
9655 if (flag_sanitize & SANITIZE_UNREACHABLE
9656 && (current_function_decl == NULL
9657 || !lookup_attribute ("no_sanitize_undefined",
9658 DECL_ATTRIBUTES (current_function_decl))))
9659 return ubsan_instrument_unreachable (loc);
9660 break;
9661
9662 default:
9663 break;
9664 }
9665 return NULL_TREE;
9666 }
9667
9668 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9669 IGNORE is true if the result of the function call is ignored. This
9670 function returns NULL_TREE if no simplification was possible. */
9671
9672 static tree
9673 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9674 {
9675 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9676 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9677 switch (fcode)
9678 {
9679 case BUILT_IN_CONSTANT_P:
9680 {
9681 tree val = fold_builtin_constant_p (arg0);
9682
9683 /* Gimplification will pull the CALL_EXPR for the builtin out of
9684 an if condition. When not optimizing, we'll not CSE it back.
9685 To avoid link-error-style regressions, return false now. */
9686 if (!val && !optimize)
9687 val = integer_zero_node;
9688
9689 return val;
9690 }
9691
9692 case BUILT_IN_CLASSIFY_TYPE:
9693 return fold_builtin_classify_type (arg0);
9694
9695 case BUILT_IN_STRLEN:
9696 return fold_builtin_strlen (loc, type, arg0);
9697
9698 CASE_FLT_FN (BUILT_IN_FABS):
9699 case BUILT_IN_FABSD32:
9700 case BUILT_IN_FABSD64:
9701 case BUILT_IN_FABSD128:
9702 return fold_builtin_fabs (loc, arg0, type);
9703
9704 case BUILT_IN_ABS:
9705 case BUILT_IN_LABS:
9706 case BUILT_IN_LLABS:
9707 case BUILT_IN_IMAXABS:
9708 return fold_builtin_abs (loc, arg0, type);
9709
9710 CASE_FLT_FN (BUILT_IN_CONJ):
9711 if (validate_arg (arg0, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9713 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9714 break;
9715
9716 CASE_FLT_FN (BUILT_IN_CREAL):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9720 break;
9721
9722 CASE_FLT_FN (BUILT_IN_CIMAG):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9726 break;
9727
9728 CASE_FLT_FN (BUILT_IN_CCOS):
9729 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9730
9731 CASE_FLT_FN (BUILT_IN_CCOSH):
9732 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9733
9734 CASE_FLT_FN (BUILT_IN_CPROJ):
9735 return fold_builtin_cproj (loc, arg0, type);
9736
9737 CASE_FLT_FN (BUILT_IN_CSIN):
9738 if (validate_arg (arg0, COMPLEX_TYPE)
9739 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9740 return do_mpc_arg1 (arg0, type, mpc_sin);
9741 break;
9742
9743 CASE_FLT_FN (BUILT_IN_CSINH):
9744 if (validate_arg (arg0, COMPLEX_TYPE)
9745 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9746 return do_mpc_arg1 (arg0, type, mpc_sinh);
9747 break;
9748
9749 CASE_FLT_FN (BUILT_IN_CTAN):
9750 if (validate_arg (arg0, COMPLEX_TYPE)
9751 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9752 return do_mpc_arg1 (arg0, type, mpc_tan);
9753 break;
9754
9755 CASE_FLT_FN (BUILT_IN_CTANH):
9756 if (validate_arg (arg0, COMPLEX_TYPE)
9757 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9758 return do_mpc_arg1 (arg0, type, mpc_tanh);
9759 break;
9760
9761 CASE_FLT_FN (BUILT_IN_CLOG):
9762 if (validate_arg (arg0, COMPLEX_TYPE)
9763 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9764 return do_mpc_arg1 (arg0, type, mpc_log);
9765 break;
9766
9767 CASE_FLT_FN (BUILT_IN_CSQRT):
9768 if (validate_arg (arg0, COMPLEX_TYPE)
9769 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9770 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9771 break;
9772
9773 CASE_FLT_FN (BUILT_IN_CASIN):
9774 if (validate_arg (arg0, COMPLEX_TYPE)
9775 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9776 return do_mpc_arg1 (arg0, type, mpc_asin);
9777 break;
9778
9779 CASE_FLT_FN (BUILT_IN_CACOS):
9780 if (validate_arg (arg0, COMPLEX_TYPE)
9781 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9782 return do_mpc_arg1 (arg0, type, mpc_acos);
9783 break;
9784
9785 CASE_FLT_FN (BUILT_IN_CATAN):
9786 if (validate_arg (arg0, COMPLEX_TYPE)
9787 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9788 return do_mpc_arg1 (arg0, type, mpc_atan);
9789 break;
9790
9791 CASE_FLT_FN (BUILT_IN_CASINH):
9792 if (validate_arg (arg0, COMPLEX_TYPE)
9793 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9794 return do_mpc_arg1 (arg0, type, mpc_asinh);
9795 break;
9796
9797 CASE_FLT_FN (BUILT_IN_CACOSH):
9798 if (validate_arg (arg0, COMPLEX_TYPE)
9799 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9800 return do_mpc_arg1 (arg0, type, mpc_acosh);
9801 break;
9802
9803 CASE_FLT_FN (BUILT_IN_CATANH):
9804 if (validate_arg (arg0, COMPLEX_TYPE)
9805 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9806 return do_mpc_arg1 (arg0, type, mpc_atanh);
9807 break;
9808
9809 CASE_FLT_FN (BUILT_IN_CABS):
9810 return fold_builtin_cabs (loc, arg0, type, fndecl);
9811
9812 CASE_FLT_FN (BUILT_IN_CARG):
9813 return fold_builtin_carg (loc, arg0, type);
9814
9815 CASE_FLT_FN (BUILT_IN_SQRT):
9816 return fold_builtin_sqrt (loc, arg0, type);
9817
9818 CASE_FLT_FN (BUILT_IN_CBRT):
9819 return fold_builtin_cbrt (loc, arg0, type);
9820
9821 CASE_FLT_FN (BUILT_IN_ASIN):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9824 &dconstm1, &dconst1, true);
9825 break;
9826
9827 CASE_FLT_FN (BUILT_IN_ACOS):
9828 if (validate_arg (arg0, REAL_TYPE))
9829 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9830 &dconstm1, &dconst1, true);
9831 break;
9832
9833 CASE_FLT_FN (BUILT_IN_ATAN):
9834 if (validate_arg (arg0, REAL_TYPE))
9835 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9836 break;
9837
9838 CASE_FLT_FN (BUILT_IN_ASINH):
9839 if (validate_arg (arg0, REAL_TYPE))
9840 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9841 break;
9842
9843 CASE_FLT_FN (BUILT_IN_ACOSH):
9844 if (validate_arg (arg0, REAL_TYPE))
9845 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9846 &dconst1, NULL, true);
9847 break;
9848
9849 CASE_FLT_FN (BUILT_IN_ATANH):
9850 if (validate_arg (arg0, REAL_TYPE))
9851 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9852 &dconstm1, &dconst1, false);
9853 break;
9854
9855 CASE_FLT_FN (BUILT_IN_SIN):
9856 if (validate_arg (arg0, REAL_TYPE))
9857 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9858 break;
9859
9860 CASE_FLT_FN (BUILT_IN_COS):
9861 return fold_builtin_cos (loc, arg0, type, fndecl);
9862
9863 CASE_FLT_FN (BUILT_IN_TAN):
9864 return fold_builtin_tan (arg0, type);
9865
9866 CASE_FLT_FN (BUILT_IN_CEXP):
9867 return fold_builtin_cexp (loc, arg0, type);
9868
9869 CASE_FLT_FN (BUILT_IN_CEXPI):
9870 if (validate_arg (arg0, REAL_TYPE))
9871 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9872 break;
9873
9874 CASE_FLT_FN (BUILT_IN_SINH):
9875 if (validate_arg (arg0, REAL_TYPE))
9876 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9877 break;
9878
9879 CASE_FLT_FN (BUILT_IN_COSH):
9880 return fold_builtin_cosh (loc, arg0, type, fndecl);
9881
9882 CASE_FLT_FN (BUILT_IN_TANH):
9883 if (validate_arg (arg0, REAL_TYPE))
9884 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9885 break;
9886
9887 CASE_FLT_FN (BUILT_IN_ERF):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9890 break;
9891
9892 CASE_FLT_FN (BUILT_IN_ERFC):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9895 break;
9896
9897 CASE_FLT_FN (BUILT_IN_TGAMMA):
9898 if (validate_arg (arg0, REAL_TYPE))
9899 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9900 break;
9901
9902 CASE_FLT_FN (BUILT_IN_EXP):
9903 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9904
9905 CASE_FLT_FN (BUILT_IN_EXP2):
9906 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9907
9908 CASE_FLT_FN (BUILT_IN_EXP10):
9909 CASE_FLT_FN (BUILT_IN_POW10):
9910 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9911
9912 CASE_FLT_FN (BUILT_IN_EXPM1):
9913 if (validate_arg (arg0, REAL_TYPE))
9914 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_LOG):
9918 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9919
9920 CASE_FLT_FN (BUILT_IN_LOG2):
9921 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9922
9923 CASE_FLT_FN (BUILT_IN_LOG10):
9924 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9925
9926 CASE_FLT_FN (BUILT_IN_LOG1P):
9927 if (validate_arg (arg0, REAL_TYPE))
9928 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9929 &dconstm1, NULL, false);
9930 break;
9931
9932 CASE_FLT_FN (BUILT_IN_J0):
9933 if (validate_arg (arg0, REAL_TYPE))
9934 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9935 NULL, NULL, 0);
9936 break;
9937
9938 CASE_FLT_FN (BUILT_IN_J1):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9941 NULL, NULL, 0);
9942 break;
9943
9944 CASE_FLT_FN (BUILT_IN_Y0):
9945 if (validate_arg (arg0, REAL_TYPE))
9946 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9947 &dconst0, NULL, false);
9948 break;
9949
9950 CASE_FLT_FN (BUILT_IN_Y1):
9951 if (validate_arg (arg0, REAL_TYPE))
9952 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9953 &dconst0, NULL, false);
9954 break;
9955
9956 CASE_FLT_FN (BUILT_IN_NAN):
9957 case BUILT_IN_NAND32:
9958 case BUILT_IN_NAND64:
9959 case BUILT_IN_NAND128:
9960 return fold_builtin_nan (arg0, type, true);
9961
9962 CASE_FLT_FN (BUILT_IN_NANS):
9963 return fold_builtin_nan (arg0, type, false);
9964
9965 CASE_FLT_FN (BUILT_IN_FLOOR):
9966 return fold_builtin_floor (loc, fndecl, arg0);
9967
9968 CASE_FLT_FN (BUILT_IN_CEIL):
9969 return fold_builtin_ceil (loc, fndecl, arg0);
9970
9971 CASE_FLT_FN (BUILT_IN_TRUNC):
9972 return fold_builtin_trunc (loc, fndecl, arg0);
9973
9974 CASE_FLT_FN (BUILT_IN_ROUND):
9975 return fold_builtin_round (loc, fndecl, arg0);
9976
9977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9978 CASE_FLT_FN (BUILT_IN_RINT):
9979 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9980
9981 CASE_FLT_FN (BUILT_IN_ICEIL):
9982 CASE_FLT_FN (BUILT_IN_LCEIL):
9983 CASE_FLT_FN (BUILT_IN_LLCEIL):
9984 CASE_FLT_FN (BUILT_IN_LFLOOR):
9985 CASE_FLT_FN (BUILT_IN_IFLOOR):
9986 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9987 CASE_FLT_FN (BUILT_IN_IROUND):
9988 CASE_FLT_FN (BUILT_IN_LROUND):
9989 CASE_FLT_FN (BUILT_IN_LLROUND):
9990 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9991
9992 CASE_FLT_FN (BUILT_IN_IRINT):
9993 CASE_FLT_FN (BUILT_IN_LRINT):
9994 CASE_FLT_FN (BUILT_IN_LLRINT):
9995 return fold_fixed_mathfn (loc, fndecl, arg0);
9996
9997 case BUILT_IN_BSWAP16:
9998 case BUILT_IN_BSWAP32:
9999 case BUILT_IN_BSWAP64:
10000 return fold_builtin_bswap (fndecl, arg0);
10001
10002 CASE_INT_FN (BUILT_IN_FFS):
10003 CASE_INT_FN (BUILT_IN_CLZ):
10004 CASE_INT_FN (BUILT_IN_CTZ):
10005 CASE_INT_FN (BUILT_IN_CLRSB):
10006 CASE_INT_FN (BUILT_IN_POPCOUNT):
10007 CASE_INT_FN (BUILT_IN_PARITY):
10008 return fold_builtin_bitop (fndecl, arg0);
10009
10010 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10011 return fold_builtin_signbit (loc, arg0, type);
10012
10013 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10014 return fold_builtin_significand (loc, arg0, type);
10015
10016 CASE_FLT_FN (BUILT_IN_ILOGB):
10017 CASE_FLT_FN (BUILT_IN_LOGB):
10018 return fold_builtin_logb (loc, arg0, type);
10019
10020 case BUILT_IN_ISASCII:
10021 return fold_builtin_isascii (loc, arg0);
10022
10023 case BUILT_IN_TOASCII:
10024 return fold_builtin_toascii (loc, arg0);
10025
10026 case BUILT_IN_ISDIGIT:
10027 return fold_builtin_isdigit (loc, arg0);
10028
10029 CASE_FLT_FN (BUILT_IN_FINITE):
10030 case BUILT_IN_FINITED32:
10031 case BUILT_IN_FINITED64:
10032 case BUILT_IN_FINITED128:
10033 case BUILT_IN_ISFINITE:
10034 {
10035 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10036 if (ret)
10037 return ret;
10038 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10039 }
10040
10041 CASE_FLT_FN (BUILT_IN_ISINF):
10042 case BUILT_IN_ISINFD32:
10043 case BUILT_IN_ISINFD64:
10044 case BUILT_IN_ISINFD128:
10045 {
10046 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10047 if (ret)
10048 return ret;
10049 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10050 }
10051
10052 case BUILT_IN_ISNORMAL:
10053 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10054
10055 case BUILT_IN_ISINF_SIGN:
10056 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10057
10058 CASE_FLT_FN (BUILT_IN_ISNAN):
10059 case BUILT_IN_ISNAND32:
10060 case BUILT_IN_ISNAND64:
10061 case BUILT_IN_ISNAND128:
10062 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10063
10064 case BUILT_IN_PRINTF:
10065 case BUILT_IN_PRINTF_UNLOCKED:
10066 case BUILT_IN_VPRINTF:
10067 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10068
10069 case BUILT_IN_FREE:
10070 if (integer_zerop (arg0))
10071 return build_empty_stmt (loc);
10072 break;
10073
10074 default:
10075 break;
10076 }
10077
10078 return NULL_TREE;
10079
10080 }
10081
10082 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10083 IGNORE is true if the result of the function call is ignored. This
10084 function returns NULL_TREE if no simplification was possible. */
10085
10086 static tree
10087 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10088 {
10089 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10091
10092 switch (fcode)
10093 {
10094 CASE_FLT_FN (BUILT_IN_JN):
10095 if (validate_arg (arg0, INTEGER_TYPE)
10096 && validate_arg (arg1, REAL_TYPE))
10097 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10098 break;
10099
10100 CASE_FLT_FN (BUILT_IN_YN):
10101 if (validate_arg (arg0, INTEGER_TYPE)
10102 && validate_arg (arg1, REAL_TYPE))
10103 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10104 &dconst0, false);
10105 break;
10106
10107 CASE_FLT_FN (BUILT_IN_DREM):
10108 CASE_FLT_FN (BUILT_IN_REMAINDER):
10109 if (validate_arg (arg0, REAL_TYPE)
10110 && validate_arg (arg1, REAL_TYPE))
10111 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10112 break;
10113
10114 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10115 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10116 if (validate_arg (arg0, REAL_TYPE)
10117 && validate_arg (arg1, POINTER_TYPE))
10118 return do_mpfr_lgamma_r (arg0, arg1, type);
10119 break;
10120
10121 CASE_FLT_FN (BUILT_IN_ATAN2):
10122 if (validate_arg (arg0, REAL_TYPE)
10123 && validate_arg (arg1, REAL_TYPE))
10124 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10125 break;
10126
10127 CASE_FLT_FN (BUILT_IN_FDIM):
10128 if (validate_arg (arg0, REAL_TYPE)
10129 && validate_arg (arg1, REAL_TYPE))
10130 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10131 break;
10132
10133 CASE_FLT_FN (BUILT_IN_HYPOT):
10134 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10135
10136 CASE_FLT_FN (BUILT_IN_CPOW):
10137 if (validate_arg (arg0, COMPLEX_TYPE)
10138 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10139 && validate_arg (arg1, COMPLEX_TYPE)
10140 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10141 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10142 break;
10143
10144 CASE_FLT_FN (BUILT_IN_LDEXP):
10145 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10146 CASE_FLT_FN (BUILT_IN_SCALBN):
10147 CASE_FLT_FN (BUILT_IN_SCALBLN):
10148 return fold_builtin_load_exponent (loc, arg0, arg1,
10149 type, /*ldexp=*/false);
10150
10151 CASE_FLT_FN (BUILT_IN_FREXP):
10152 return fold_builtin_frexp (loc, arg0, arg1, type);
10153
10154 CASE_FLT_FN (BUILT_IN_MODF):
10155 return fold_builtin_modf (loc, arg0, arg1, type);
10156
10157 case BUILT_IN_STRSTR:
10158 return fold_builtin_strstr (loc, arg0, arg1, type);
10159
10160 case BUILT_IN_STRSPN:
10161 return fold_builtin_strspn (loc, arg0, arg1);
10162
10163 case BUILT_IN_STRCSPN:
10164 return fold_builtin_strcspn (loc, arg0, arg1);
10165
10166 case BUILT_IN_STRCHR:
10167 case BUILT_IN_INDEX:
10168 return fold_builtin_strchr (loc, arg0, arg1, type);
10169
10170 case BUILT_IN_STRRCHR:
10171 case BUILT_IN_RINDEX:
10172 return fold_builtin_strrchr (loc, arg0, arg1, type);
10173
10174 case BUILT_IN_STPCPY:
10175 if (ignore)
10176 {
10177 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10178 if (!fn)
10179 break;
10180
10181 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10182 }
10183 else
10184 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10185 break;
10186
10187 case BUILT_IN_STRCMP:
10188 return fold_builtin_strcmp (loc, arg0, arg1);
10189
10190 case BUILT_IN_STRPBRK:
10191 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10192
10193 case BUILT_IN_EXPECT:
10194 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10195
10196 CASE_FLT_FN (BUILT_IN_POW):
10197 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10198
10199 CASE_FLT_FN (BUILT_IN_POWI):
10200 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10201
10202 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10203 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10204
10205 CASE_FLT_FN (BUILT_IN_FMIN):
10206 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10207
10208 CASE_FLT_FN (BUILT_IN_FMAX):
10209 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10210
10211 case BUILT_IN_ISGREATER:
10212 return fold_builtin_unordered_cmp (loc, fndecl,
10213 arg0, arg1, UNLE_EXPR, LE_EXPR);
10214 case BUILT_IN_ISGREATEREQUAL:
10215 return fold_builtin_unordered_cmp (loc, fndecl,
10216 arg0, arg1, UNLT_EXPR, LT_EXPR);
10217 case BUILT_IN_ISLESS:
10218 return fold_builtin_unordered_cmp (loc, fndecl,
10219 arg0, arg1, UNGE_EXPR, GE_EXPR);
10220 case BUILT_IN_ISLESSEQUAL:
10221 return fold_builtin_unordered_cmp (loc, fndecl,
10222 arg0, arg1, UNGT_EXPR, GT_EXPR);
10223 case BUILT_IN_ISLESSGREATER:
10224 return fold_builtin_unordered_cmp (loc, fndecl,
10225 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10226 case BUILT_IN_ISUNORDERED:
10227 return fold_builtin_unordered_cmp (loc, fndecl,
10228 arg0, arg1, UNORDERED_EXPR,
10229 NOP_EXPR);
10230
10231 /* We do the folding for va_start in the expander. */
10232 case BUILT_IN_VA_START:
10233 break;
10234
10235 case BUILT_IN_OBJECT_SIZE:
10236 return fold_builtin_object_size (arg0, arg1);
10237
10238 case BUILT_IN_PRINTF:
10239 case BUILT_IN_PRINTF_UNLOCKED:
10240 case BUILT_IN_VPRINTF:
10241 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10242
10243 case BUILT_IN_PRINTF_CHK:
10244 case BUILT_IN_VPRINTF_CHK:
10245 if (!validate_arg (arg0, INTEGER_TYPE)
10246 || TREE_SIDE_EFFECTS (arg0))
10247 return NULL_TREE;
10248 else
10249 return fold_builtin_printf (loc, fndecl,
10250 arg1, NULL_TREE, ignore, fcode);
10251 break;
10252
10253 case BUILT_IN_FPRINTF:
10254 case BUILT_IN_FPRINTF_UNLOCKED:
10255 case BUILT_IN_VFPRINTF:
10256 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10257 ignore, fcode);
10258
10259 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10260 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10261
10262 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10263 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10264
10265 default:
10266 break;
10267 }
10268 return NULL_TREE;
10269 }
10270
10271 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10272 and ARG2. IGNORE is true if the result of the function call is ignored.
10273 This function returns NULL_TREE if no simplification was possible. */
10274
10275 static tree
10276 fold_builtin_3 (location_t loc, tree fndecl,
10277 tree arg0, tree arg1, tree arg2, bool ignore)
10278 {
10279 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10281 switch (fcode)
10282 {
10283
10284 CASE_FLT_FN (BUILT_IN_SINCOS):
10285 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10286
10287 CASE_FLT_FN (BUILT_IN_FMA):
10288 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10289 break;
10290
10291 CASE_FLT_FN (BUILT_IN_REMQUO):
10292 if (validate_arg (arg0, REAL_TYPE)
10293 && validate_arg (arg1, REAL_TYPE)
10294 && validate_arg (arg2, POINTER_TYPE))
10295 return do_mpfr_remquo (arg0, arg1, arg2);
10296 break;
10297
10298 case BUILT_IN_STRNCAT:
10299 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10300
10301 case BUILT_IN_STRNCMP:
10302 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10303
10304 case BUILT_IN_MEMCHR:
10305 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10306
10307 case BUILT_IN_BCMP:
10308 case BUILT_IN_MEMCMP:
10309 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10310
10311 case BUILT_IN_STRCAT_CHK:
10312 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10313
10314 case BUILT_IN_PRINTF_CHK:
10315 case BUILT_IN_VPRINTF_CHK:
10316 if (!validate_arg (arg0, INTEGER_TYPE)
10317 || TREE_SIDE_EFFECTS (arg0))
10318 return NULL_TREE;
10319 else
10320 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10321 break;
10322
10323 case BUILT_IN_FPRINTF:
10324 case BUILT_IN_FPRINTF_UNLOCKED:
10325 case BUILT_IN_VFPRINTF:
10326 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10327 ignore, fcode);
10328
10329 case BUILT_IN_FPRINTF_CHK:
10330 case BUILT_IN_VFPRINTF_CHK:
10331 if (!validate_arg (arg1, INTEGER_TYPE)
10332 || TREE_SIDE_EFFECTS (arg1))
10333 return NULL_TREE;
10334 else
10335 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10336 ignore, fcode);
10337
10338 case BUILT_IN_EXPECT:
10339 return fold_builtin_expect (loc, arg0, arg1, arg2);
10340
10341 default:
10342 break;
10343 }
10344 return NULL_TREE;
10345 }
10346
10347 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10348 ARG2, and ARG3. IGNORE is true if the result of the function call is
10349 ignored. This function returns NULL_TREE if no simplification was
10350 possible. */
10351
10352 static tree
10353 fold_builtin_4 (location_t loc, tree fndecl,
10354 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10355 {
10356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10357
10358 switch (fcode)
10359 {
10360 case BUILT_IN_STRNCAT_CHK:
10361 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10362
10363 case BUILT_IN_FPRINTF_CHK:
10364 case BUILT_IN_VFPRINTF_CHK:
10365 if (!validate_arg (arg1, INTEGER_TYPE)
10366 || TREE_SIDE_EFFECTS (arg1))
10367 return NULL_TREE;
10368 else
10369 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10370 ignore, fcode);
10371 break;
10372
10373 default:
10374 break;
10375 }
10376 return NULL_TREE;
10377 }
10378
10379 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10380 arguments, where NARGS <= 4. IGNORE is true if the result of the
10381 function call is ignored. This function returns NULL_TREE if no
10382 simplification was possible. Note that this only folds builtins with
10383 fixed argument patterns. Foldings that do varargs-to-varargs
10384 transformations, or that match calls with more than 4 arguments,
10385 need to be handled with fold_builtin_varargs instead. */
10386
10387 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10388
10389 static tree
10390 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10391 {
10392 tree ret = NULL_TREE;
10393
10394 switch (nargs)
10395 {
10396 case 0:
10397 ret = fold_builtin_0 (loc, fndecl, ignore);
10398 break;
10399 case 1:
10400 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10401 break;
10402 case 2:
10403 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10404 break;
10405 case 3:
10406 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10407 break;
10408 case 4:
10409 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10410 ignore);
10411 break;
10412 default:
10413 break;
10414 }
10415 if (ret)
10416 {
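/* Wrap the folded result in a NOP_EXPR with TREE_NO_WARNING set, so
   that replacing the original call does not trigger follow-up
   warnings such as "statement with no effect" (see fold_call_expr).  */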
10417 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10418 SET_EXPR_LOCATION (ret, loc);
10419 TREE_NO_WARNING (ret) = 1;
10420 return ret;
10421 }
10422 return NULL_TREE;
10423 }
10424
10425 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10426 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10427 of arguments in ARGS to be omitted. OLDNARGS is the number of
10428 elements in ARGS. */
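/* For example, with OLDNARGS == 3, SKIP == 1 and N == 2, the
   rewritten call receives the two NEWARGS followed by ARGS[1] and
   ARGS[2], i.e. FNDECL (NEWARGS[0], NEWARGS[1], ARGS[1], ARGS[2]).  */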
10429
10430 static tree
10431 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10432 int skip, tree fndecl, int n, va_list newargs)
10433 {
10434 int nargs = oldnargs - skip + n;
10435 tree *buffer;
10436
10437 if (n > 0)
10438 {
10439 int i, j;
10440
10441 buffer = XALLOCAVEC (tree, nargs);
10442 for (i = 0; i < n; i++)
10443 buffer[i] = va_arg (newargs, tree);
10444 for (j = skip; j < oldnargs; j++, i++)
10445 buffer[i] = args[j];
10446 }
10447 else
10448 buffer = args + skip;
10449
10450 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10451 }
10452
10453 /* Return true if FNDECL shouldn't be folded right now.
10454 If a built-in function has an inline attribute always_inline
10455 wrapper, defer folding it until after always_inline functions have
10456 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10457 might not be performed. */
10458
10459 bool
10460 avoid_folding_inline_builtin (tree fndecl)
10461 {
10462 return (DECL_DECLARED_INLINE_P (fndecl)
10463 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10464 && cfun
10465 && !cfun->always_inline_functions_inlined
10466 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10467 }
10468
10469 /* A wrapper function for builtin folding that prevents warnings for
10470 "statement without effect" and the like, caused by removing the
10471 call node earlier than the warning is generated. */
10472
10473 tree
10474 fold_call_expr (location_t loc, tree exp, bool ignore)
10475 {
10476 tree ret = NULL_TREE;
10477 tree fndecl = get_callee_fndecl (exp);
10478 if (fndecl
10479 && TREE_CODE (fndecl) == FUNCTION_DECL
10480 && DECL_BUILT_IN (fndecl)
10481 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10482 yet. Defer folding until we see all the arguments
10483 (after inlining). */
10484 && !CALL_EXPR_VA_ARG_PACK (exp))
10485 {
10486 int nargs = call_expr_nargs (exp);
10487
10488 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
10489 the last argument is __builtin_va_arg_pack (). Defer folding
10490 even in that case, until the arguments are finalized. */
10491 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10492 {
10493 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10494 if (fndecl2
10495 && TREE_CODE (fndecl2) == FUNCTION_DECL
10496 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10497 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10498 return NULL_TREE;
10499 }
10500
10501 if (avoid_folding_inline_builtin (fndecl))
10502 return NULL_TREE;
10503
10504 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10505 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10506 CALL_EXPR_ARGP (exp), ignore);
10507 else
10508 {
10509 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10510 {
10511 tree *args = CALL_EXPR_ARGP (exp);
10512 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10513 }
10514 if (!ret)
10515 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10516 if (ret)
10517 return ret;
10518 }
10519 }
10520 return NULL_TREE;
10521 }
10522
10523 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10524 N arguments are passed in the array ARGARRAY. */
10525
10526 tree
10527 fold_builtin_call_array (location_t loc, tree type,
10528 tree fn,
10529 int n,
10530 tree *argarray)
10531 {
10532 tree ret = NULL_TREE;
10533 tree exp;
10534
10535 if (TREE_CODE (fn) == ADDR_EXPR)
10536 {
10537 tree fndecl = TREE_OPERAND (fn, 0);
10538 if (TREE_CODE (fndecl) == FUNCTION_DECL
10539 && DECL_BUILT_IN (fndecl))
10540 {
10541 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10542 function are not finalized yet. Defer folding until they are. */
10543 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10544 {
10545 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10546 if (fndecl2
10547 && TREE_CODE (fndecl2) == FUNCTION_DECL
10548 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10549 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10550 return build_call_array_loc (loc, type, fn, n, argarray);
10551 }
10552 if (avoid_folding_inline_builtin (fndecl))
10553 return build_call_array_loc (loc, type, fn, n, argarray);
10554 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10555 {
10556 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10557 if (ret)
10558 return ret;
10559
10560 return build_call_array_loc (loc, type, fn, n, argarray);
10561 }
10562 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10563 {
10564 /* First try the transformations that don't require consing up
10565 an exp. */
10566 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10567 if (ret)
10568 return ret;
10569 }
10570
10571 /* If we got this far, we need to build an exp. */
10572 exp = build_call_array_loc (loc, type, fn, n, argarray);
10573 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10574 return ret ? ret : exp;
10575 }
10576 }
10577
10578 return build_call_array_loc (loc, type, fn, n, argarray);
10579 }
10580
10581 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10582 along with N new arguments specified as the "..." parameters. SKIP
10583 is the number of arguments in EXP to be omitted. This function is used
10584 to do varargs-to-varargs transformations. */
10585
10586 static tree
10587 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10588 {
10589 va_list ap;
10590 tree t;
10591
10592 va_start (ap, n);
10593 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10594 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10595 va_end (ap);
10596
10597 return t;
10598 }
10599
10600 /* Validate a single argument ARG against a tree code CODE representing
10601 a type. */
10602
10603 static bool
10604 validate_arg (const_tree arg, enum tree_code code)
10605 {
10606 if (!arg)
10607 return false;
10608 else if (code == POINTER_TYPE)
10609 return POINTER_TYPE_P (TREE_TYPE (arg));
10610 else if (code == INTEGER_TYPE)
10611 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10612 return code == TREE_CODE (TREE_TYPE (arg));
10613 }
10614
10615 /* This function validates the types of a function call argument list
10616 against a specified list of tree_codes. If the last specifier is a 0,
10617 that represents an ellipsis; otherwise the last specifier must be a
10618 VOID_TYPE.
10619
10620 This is the GIMPLE version of validate_arglist. Eventually we want to
10621 completely convert builtins.c to work from GIMPLEs and the tree based
10622 validate_arglist will then be removed. */
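/* For example,
     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   returns true iff CALL has exactly two arguments, the first of
   pointer type and the second of integral type.  */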
10623
10624 bool
10625 validate_gimple_arglist (const_gimple call, ...)
10626 {
10627 enum tree_code code;
10628 bool res = false;
10629 va_list ap;
10630 const_tree arg;
10631 size_t i;
10632
10633 va_start (ap, call);
10634 i = 0;
10635
10636 do
10637 {
10638 code = (enum tree_code) va_arg (ap, int);
10639 switch (code)
10640 {
10641 case 0:
10642 /* This signifies an ellipsis; any further arguments are all ok. */
10643 res = true;
10644 goto end;
10645 case VOID_TYPE:
10646 /* This signifies an endlink, if no arguments remain, return
10647 true, otherwise return false. */
10648 res = (i == gimple_call_num_args (call));
10649 goto end;
10650 default:
10651 /* If no parameters remain or the parameter's code does not
10652 match the specified code, return false. Otherwise continue
10653 checking any remaining arguments. */
10654 arg = gimple_call_arg (call, i++);
10655 if (!validate_arg (arg, code))
10656 goto end;
10657 break;
10658 }
10659 }
10660 while (1);
10661
10662 /* We need gotos here since we can call va_end only once in a
10663 function. */
10664 end: ;
10665 va_end (ap);
10666
10667 return res;
10668 }
10669
10670 /* Default target-specific builtin expander that does nothing. */
10671
10672 rtx
10673 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10674 rtx target ATTRIBUTE_UNUSED,
10675 rtx subtarget ATTRIBUTE_UNUSED,
10676 enum machine_mode mode ATTRIBUTE_UNUSED,
10677 int ignore ATTRIBUTE_UNUSED)
10678 {
10679 return NULL_RTX;
10680 }
10681
10682 /* Returns true if EXP represents data that would potentially reside
10683 in a readonly section. */
10684
10685 bool
10686 readonly_data_expr (tree exp)
10687 {
10688 STRIP_NOPS (exp);
10689
10690 if (TREE_CODE (exp) != ADDR_EXPR)
10691 return false;
10692
10693 exp = get_base_address (TREE_OPERAND (exp, 0));
10694 if (!exp)
10695 return false;
10696
10697 /* Make sure we call decl_readonly_section only for trees it
10698 can handle (since it returns true for everything it doesn't
10699 understand). */
10700 if (TREE_CODE (exp) == STRING_CST
10701 || TREE_CODE (exp) == CONSTRUCTOR
10702 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10703 return decl_readonly_section (exp, 0);
10704 else
10705 return false;
10706 }
10707
10708 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10709 to the call, and TYPE is its return type.
10710
10711 Return NULL_TREE if no simplification was possible, otherwise return the
10712 simplified form of the call as a tree.
10713
10714 The simplified form may be a constant or other expression which
10715 computes the same value, but in a more efficient manner (including
10716 calls to other builtin functions).
10717
10718 The call may contain arguments which need to be evaluated, but
10719 which are not useful to determine the result of the call. In
10720 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10721 COMPOUND_EXPR will be an argument which must be evaluated.
10722 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10723 COMPOUND_EXPR in the chain will contain the tree for the simplified
10724 form of the builtin function call. */
10725
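/* For instance, strstr ("hello", "lo") folds to "hello" + 3, and
   strstr (s, "o") becomes strchr (s, 'o').  */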
10726 static tree
10727 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10728 {
10729 if (!validate_arg (s1, POINTER_TYPE)
10730 || !validate_arg (s2, POINTER_TYPE))
10731 return NULL_TREE;
10732 else
10733 {
10734 tree fn;
10735 const char *p1, *p2;
10736
10737 p2 = c_getstr (s2);
10738 if (p2 == NULL)
10739 return NULL_TREE;
10740
10741 p1 = c_getstr (s1);
10742 if (p1 != NULL)
10743 {
10744 const char *r = strstr (p1, p2);
10745 tree tem;
10746
10747 if (r == NULL)
10748 return build_int_cst (TREE_TYPE (s1), 0);
10749
10750 /* Return an offset into the constant string argument. */
10751 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10752 return fold_convert_loc (loc, type, tem);
10753 }
10754
10755 /* The argument is const char *, and the result is char *, so we need
10756 a type conversion here to avoid a warning. */
10757 if (p2[0] == '\0')
10758 return fold_convert_loc (loc, type, s1);
10759
10760 if (p2[1] != '\0')
10761 return NULL_TREE;
10762
10763 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10764 if (!fn)
10765 return NULL_TREE;
10766
10767 /* New argument list transforming strstr(s1, s2) to
10768 strchr(s1, s2[0]). */
10769 return build_call_expr_loc (loc, fn, 2, s1,
10770 build_int_cst (integer_type_node, p2[0]));
10771 }
10772 }
10773
10774 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10775 the call, and TYPE is its return type.
10776
10777 Return NULL_TREE if no simplification was possible, otherwise return the
10778 simplified form of the call as a tree.
10779
10780 The simplified form may be a constant or other expression which
10781 computes the same value, but in a more efficient manner (including
10782 calls to other builtin functions).
10783
10784 The call may contain arguments which need to be evaluated, but
10785 which are not useful to determine the result of the call. In
10786 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10787 COMPOUND_EXPR will be an argument which must be evaluated.
10788 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10789 COMPOUND_EXPR in the chain will contain the tree for the simplified
10790 form of the builtin function call. */
10791
10792 static tree
10793 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10794 {
10795 if (!validate_arg (s1, POINTER_TYPE)
10796 || !validate_arg (s2, INTEGER_TYPE))
10797 return NULL_TREE;
10798 else
10799 {
10800 const char *p1;
10801
10802 if (TREE_CODE (s2) != INTEGER_CST)
10803 return NULL_TREE;
10804
10805 p1 = c_getstr (s1);
10806 if (p1 != NULL)
10807 {
10808 char c;
10809 const char *r;
10810 tree tem;
10811
10812 if (target_char_cast (s2, &c))
10813 return NULL_TREE;
10814
10815 r = strchr (p1, c);
10816
10817 if (r == NULL)
10818 return build_int_cst (TREE_TYPE (s1), 0);
10819
10820 /* Return an offset into the constant string argument. */
10821 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10822 return fold_convert_loc (loc, type, tem);
10823 }
10824 return NULL_TREE;
10825 }
10826 }
10827
10828 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10829 the call, and TYPE is its return type.
10830
10831 Return NULL_TREE if no simplification was possible, otherwise return the
10832 simplified form of the call as a tree.
10833
10834 The simplified form may be a constant or other expression which
10835 computes the same value, but in a more efficient manner (including
10836 calls to other builtin functions).
10837
10838 The call may contain arguments which need to be evaluated, but
10839 which are not useful to determine the result of the call. In
10840 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10841 COMPOUND_EXPR will be an argument which must be evaluated.
10842 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10843 COMPOUND_EXPR in the chain will contain the tree for the simplified
10844 form of the builtin function call. */
10845
10846 static tree
10847 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10848 {
10849 if (!validate_arg (s1, POINTER_TYPE)
10850 || !validate_arg (s2, INTEGER_TYPE))
10851 return NULL_TREE;
10852 else
10853 {
10854 tree fn;
10855 const char *p1;
10856
10857 if (TREE_CODE (s2) != INTEGER_CST)
10858 return NULL_TREE;
10859
10860 p1 = c_getstr (s1);
10861 if (p1 != NULL)
10862 {
10863 char c;
10864 const char *r;
10865 tree tem;
10866
10867 if (target_char_cast (s2, &c))
10868 return NULL_TREE;
10869
10870 r = strrchr (p1, c);
10871
10872 if (r == NULL)
10873 return build_int_cst (TREE_TYPE (s1), 0);
10874
10875 /* Return an offset into the constant string argument. */
10876 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10877 return fold_convert_loc (loc, type, tem);
10878 }
10879
10880 if (! integer_zerop (s2))
10881 return NULL_TREE;
10882
10883 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10884 if (!fn)
10885 return NULL_TREE;
10886
10887 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10888 return build_call_expr_loc (loc, fn, 2, s1, s2);
10889 }
10890 }
10891
10892 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10893 to the call, and TYPE is its return type.
10894
10895 Return NULL_TREE if no simplification was possible, otherwise return the
10896 simplified form of the call as a tree.
10897
10898 The simplified form may be a constant or other expression which
10899 computes the same value, but in a more efficient manner (including
10900 calls to other builtin functions).
10901
10902 The call may contain arguments which need to be evaluated, but
10903 which are not useful to determine the result of the call. In
10904 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10905 COMPOUND_EXPR will be an argument which must be evaluated.
10906 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10907 COMPOUND_EXPR in the chain will contain the tree for the simplified
10908 form of the builtin function call. */
10909
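/* For instance, strpbrk (s, "") folds to a null pointer (still
   evaluating S for its side-effects), and strpbrk (s, "x") becomes
   strchr (s, 'x').  */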
10910 static tree
10911 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10912 {
10913 if (!validate_arg (s1, POINTER_TYPE)
10914 || !validate_arg (s2, POINTER_TYPE))
10915 return NULL_TREE;
10916 else
10917 {
10918 tree fn;
10919 const char *p1, *p2;
10920
10921 p2 = c_getstr (s2);
10922 if (p2 == NULL)
10923 return NULL_TREE;
10924
10925 p1 = c_getstr (s1);
10926 if (p1 != NULL)
10927 {
10928 const char *r = strpbrk (p1, p2);
10929 tree tem;
10930
10931 if (r == NULL)
10932 return build_int_cst (TREE_TYPE (s1), 0);
10933
10934 /* Return an offset into the constant string argument. */
10935 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10936 return fold_convert_loc (loc, type, tem);
10937 }
10938
10939 if (p2[0] == '\0')
10940 /* strpbrk(x, "") == NULL.
10941 Evaluate and ignore s1 in case it had side-effects. */
10942 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10943
10944 if (p2[1] != '\0')
10945 return NULL_TREE; /* Really call strpbrk. */
10946
10947 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10948 if (!fn)
10949 return NULL_TREE;
10950
10951 /* New argument list transforming strpbrk(s1, s2) to
10952 strchr(s1, s2[0]). */
10953 return build_call_expr_loc (loc, fn, 2, s1,
10954 build_int_cst (integer_type_node, p2[0]));
10955 }
10956 }
10957
10958 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10959 arguments to the call.
10960
10961 Return NULL_TREE if no simplification was possible, otherwise return the
10962 simplified form of the call as a tree.
10963
10964 The simplified form may be a constant or other expression which
10965 computes the same value, but in a more efficient manner (including
10966 calls to other builtin functions).
10967
10968 The call may contain arguments which need to be evaluated, but
10969 which are not useful to determine the result of the call. In
10970 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10971 COMPOUND_EXPR will be an argument which must be evaluated.
10972 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10973 COMPOUND_EXPR in the chain will contain the tree for the simplified
10974 form of the builtin function call. */
10975
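/* For instance, strncat (dst, "abc", 5) becomes strcat (dst, "abc"),
   because the bound 5 is >= strlen ("abc").  */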
10976 static tree
10977 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10978 {
10979 if (!validate_arg (dst, POINTER_TYPE)
10980 || !validate_arg (src, POINTER_TYPE)
10981 || !validate_arg (len, INTEGER_TYPE))
10982 return NULL_TREE;
10983 else
10984 {
10985 const char *p = c_getstr (src);
10986
10987 /* If the requested length is zero, or the src parameter string
10988 length is zero, return the dst parameter. */
10989 if (integer_zerop (len) || (p && *p == '\0'))
10990 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
10991
10992 /* If the requested len is greater than or equal to the string
10993 length, call strcat. */
10994 if (TREE_CODE (len) == INTEGER_CST && p
10995 && compare_tree_int (len, strlen (p)) >= 0)
10996 {
10997 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
10998
10999 /* If the replacement _DECL isn't initialized, don't do the
11000 transformation. */
11001 if (!fn)
11002 return NULL_TREE;
11003
11004 return build_call_expr_loc (loc, fn, 2, dst, src);
11005 }
11006 return NULL_TREE;
11007 }
11008 }
11009
11010 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11011 to the call.
11012
11013 Return NULL_TREE if no simplification was possible, otherwise return the
11014 simplified form of the call as a tree.
11015
11016 The simplified form may be a constant or other expression which
11017 computes the same value, but in a more efficient manner (including
11018 calls to other builtin functions).
11019
11020 The call may contain arguments which need to be evaluated, but
11021 which are not useful to determine the result of the call. In
11022 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11023 COMPOUND_EXPR will be an argument which must be evaluated.
11024 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11025 COMPOUND_EXPR in the chain will contain the tree for the simplified
11026 form of the builtin function call. */
11027
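/* For instance, strspn ("abcba", "abc") folds to the constant 5,
   while strspn (s1, "") folds to 0 and still evaluates both
   arguments for their side-effects.  */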
11028 static tree
11029 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11030 {
11031 if (!validate_arg (s1, POINTER_TYPE)
11032 || !validate_arg (s2, POINTER_TYPE))
11033 return NULL_TREE;
11034 else
11035 {
11036 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11037
11038 /* If both arguments are constants, evaluate at compile-time. */
11039 if (p1 && p2)
11040 {
11041 const size_t r = strspn (p1, p2);
11042 return build_int_cst (size_type_node, r);
11043 }
11044
11045 /* If either argument is "", return NULL_TREE. */
11046 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11047 /* Evaluate and ignore both arguments in case either one has
11048 side-effects. */
11049 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11050 s1, s2);
11051 return NULL_TREE;
11052 }
11053 }
11054
11055 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11056 to the call.
11057
11058 Return NULL_TREE if no simplification was possible, otherwise return the
11059 simplified form of the call as a tree.
11060
11061 The simplified form may be a constant or other expression which
11062 computes the same value, but in a more efficient manner (including
11063 calls to other builtin functions).
11064
11065 The call may contain arguments which need to be evaluated, but
11066 which are not useful to determine the result of the call. In
11067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11068 COMPOUND_EXPR will be an argument which must be evaluated.
11069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11070 COMPOUND_EXPR in the chain will contain the tree for the simplified
11071 form of the builtin function call. */
11072
11073 static tree
11074 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11075 {
11076 if (!validate_arg (s1, POINTER_TYPE)
11077 || !validate_arg (s2, POINTER_TYPE))
11078 return NULL_TREE;
11079 else
11080 {
11081 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11082
11083 /* If both arguments are constants, evaluate at compile-time. */
11084 if (p1 && p2)
11085 {
11086 const size_t r = strcspn (p1, p2);
11087 return build_int_cst (size_type_node, r);
11088 }
11089
11090 /* If the first argument is "", return NULL_TREE. */
11091 if (p1 && *p1 == '\0')
11092 {
11093 /* Evaluate and ignore argument s2 in case it has
11094 side-effects. */
11095 return omit_one_operand_loc (loc, size_type_node,
11096 size_zero_node, s2);
11097 }
11098
11099 /* If the second argument is "", return __builtin_strlen(s1). */
11100 if (p2 && *p2 == '\0')
11101 {
11102 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11103
11104 /* If the replacement _DECL isn't initialized, don't do the
11105 transformation. */
11106 if (!fn)
11107 return NULL_TREE;
11108
11109 return build_call_expr_loc (loc, fn, 1, s1);
11110 }
11111 return NULL_TREE;
11112 }
11113 }
11114
11115 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11116 produced, false otherwise, so that we don't output the same error or
11117 warning two or three times.
11118
11119 bool
11120 fold_builtin_next_arg (tree exp, bool va_start_p)
11121 {
11122 tree fntype = TREE_TYPE (current_function_decl);
11123 int nargs = call_expr_nargs (exp);
11124 tree arg;
11125 /* There is a good chance the current input_location points inside the
11126 definition of the va_start macro (perhaps on the token for
11127 builtin) in a system header, so warnings will not be emitted.
11128 Use the location in real source code. */
11129 source_location current_location =
11130 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11131 NULL);
11132
11133 if (!stdarg_p (fntype))
11134 {
11135 error ("%<va_start%> used in function with fixed args");
11136 return true;
11137 }
11138
11139 if (va_start_p)
11140 {
11141 if (nargs != 2)
11142 {
11143 error ("wrong number of arguments to function %<va_start%>");
11144 return true;
11145 }
11146 arg = CALL_EXPR_ARG (exp, 1);
11147 }
11148 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11149 we have checked the arguments and, if needed, issued a warning. */
11150 else
11151 {
11152 if (nargs == 0)
11153 {
11154 /* Evidently an out of date version of <stdarg.h>; can't validate
11155 va_start's second argument, but can still work as intended. */
11156 warning_at (current_location,
11157 OPT_Wvarargs,
11158 "%<__builtin_next_arg%> called without an argument");
11159 return true;
11160 }
11161 else if (nargs > 1)
11162 {
11163 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11164 return true;
11165 }
11166 arg = CALL_EXPR_ARG (exp, 0);
11167 }
11168
11169 if (TREE_CODE (arg) == SSA_NAME)
11170 arg = SSA_NAME_VAR (arg);
11171
11172 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11173 or __builtin_next_arg (0) the first time we see it, after checking
11174 the arguments and if needed issuing a warning. */
11175 if (!integer_zerop (arg))
11176 {
11177 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11178
11179 /* Strip off all nops for the sake of the comparison. This
11180 is not quite the same as STRIP_NOPS. It does more.
11181 We must also strip off INDIRECT_EXPR for C++ reference
11182 parameters. */
11183 while (CONVERT_EXPR_P (arg)
11184 || TREE_CODE (arg) == INDIRECT_REF)
11185 arg = TREE_OPERAND (arg, 0);
11186 if (arg != last_parm)
11187 {
11188 /* FIXME: Sometimes with the tree optimizers we can end up with
11189 an argument that is not the last argument even though the user
11190 used the last argument. We just warn and set the arg to be the
11191 last argument so that we will not get wrong code because of
11192 it. */
11193 warning_at (current_location,
11194 OPT_Wvarargs,
11195 "second parameter of %<va_start%> not last named argument");
11196 }
11197
11198 /* Undefined by C99 7.15.1.4p4 (va_start):
11199 "If the parameter parmN is declared with the register storage
11200 class, with a function or array type, or with a type that is
11201 not compatible with the type that results after application of
11202 the default argument promotions, the behavior is undefined."
11203 */
11204 else if (DECL_REGISTER (arg))
11205 {
11206 warning_at (current_location,
11207 OPT_Wvarargs,
11208 "undefined behaviour when second parameter of "
11209 "%<va_start%> is declared with %<register%> storage");
11210 }
11211
11212 /* We want to verify the second parameter just once before the tree
11213 optimizers are run and then avoid keeping it in the tree,
11214 as otherwise we could warn even for correct code like:
11215 void foo (int i, ...)
11216 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11217 if (va_start_p)
11218 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11219 else
11220 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11221 }
11222 return false;
11223 }
11224
11225
11226 /* Expand a call EXP to __builtin_object_size. */
11227
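/* Calls that are still left unfolded here have an unknown object
   size, so expand to the failure value: (size_t) -1 for types 0 and
   1, and (size_t) 0 for types 2 and 3.  */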
11228 static rtx
11229 expand_builtin_object_size (tree exp)
11230 {
11231 tree ost;
11232 int object_size_type;
11233 tree fndecl = get_callee_fndecl (exp);
11234
11235 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11236 {
11237 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11238 exp, fndecl);
11239 expand_builtin_trap ();
11240 return const0_rtx;
11241 }
11242
11243 ost = CALL_EXPR_ARG (exp, 1);
11244 STRIP_NOPS (ost);
11245
11246 if (TREE_CODE (ost) != INTEGER_CST
11247 || tree_int_cst_sgn (ost) < 0
11248 || compare_tree_int (ost, 3) > 0)
11249 {
11250 error ("%Klast argument of %D is not integer constant between 0 and 3",
11251 exp, fndecl);
11252 expand_builtin_trap ();
11253 return const0_rtx;
11254 }
11255
11256 object_size_type = tree_to_shwi (ost);
11257
11258 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11259 }
11260
11261 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11262 FCODE is the BUILT_IN_* to use.
11263 Return NULL_RTX if we failed; the caller should emit a normal call,
11264 otherwise try to get the result in TARGET, if convenient (and in
11265 mode MODE if that's convenient). */
11266
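/* For instance, __builtin___memcpy_chk (d, s, n, os) with a constant
   N known not to exceed the object size OS is expanded as a plain
   memcpy (d, s, n); a constant N larger than OS triggers the "will
   always overflow" warning and falls back to the library call.  */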
11267 static rtx
11268 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11269 enum built_in_function fcode)
11270 {
11271 tree dest, src, len, size;
11272
11273 if (!validate_arglist (exp,
11274 POINTER_TYPE,
11275 fcode == BUILT_IN_MEMSET_CHK
11276 ? INTEGER_TYPE : POINTER_TYPE,
11277 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11278 return NULL_RTX;
11279
11280 dest = CALL_EXPR_ARG (exp, 0);
11281 src = CALL_EXPR_ARG (exp, 1);
11282 len = CALL_EXPR_ARG (exp, 2);
11283 size = CALL_EXPR_ARG (exp, 3);
11284
11285 if (! tree_fits_uhwi_p (size))
11286 return NULL_RTX;
11287
11288 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11289 {
11290 tree fn;
11291
11292 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11293 {
11294 warning_at (tree_nonartificial_location (exp),
11295 0, "%Kcall to %D will always overflow destination buffer",
11296 exp, get_callee_fndecl (exp));
11297 return NULL_RTX;
11298 }
11299
11300 fn = NULL_TREE;
11301 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11302 mem{cpy,pcpy,move,set} is available. */
11303 switch (fcode)
11304 {
11305 case BUILT_IN_MEMCPY_CHK:
11306 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11307 break;
11308 case BUILT_IN_MEMPCPY_CHK:
11309 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11310 break;
11311 case BUILT_IN_MEMMOVE_CHK:
11312 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11313 break;
11314 case BUILT_IN_MEMSET_CHK:
11315 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11316 break;
11317 default:
11318 break;
11319 }
11320
11321 if (! fn)
11322 return NULL_RTX;
11323
11324 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11325 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11326 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11327 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11328 }
11329 else if (fcode == BUILT_IN_MEMSET_CHK)
11330 return NULL_RTX;
11331 else
11332 {
11333 unsigned int dest_align = get_pointer_alignment (dest);
11334
11335 /* If DEST is not a pointer type, call the normal function. */
11336 if (dest_align == 0)
11337 return NULL_RTX;
11338
11339 /* If SRC and DEST are the same (and not volatile), do nothing. */
11340 if (operand_equal_p (src, dest, 0))
11341 {
11342 tree expr;
11343
11344 if (fcode != BUILT_IN_MEMPCPY_CHK)
11345 {
11346 /* Evaluate and ignore LEN in case it has side-effects. */
11347 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11348 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11349 }
11350
11351 expr = fold_build_pointer_plus (dest, len);
11352 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11353 }
11354
11355 /* __memmove_chk special case. */
11356 if (fcode == BUILT_IN_MEMMOVE_CHK)
11357 {
11358 unsigned int src_align = get_pointer_alignment (src);
11359
11360 if (src_align == 0)
11361 return NULL_RTX;
11362
11363 /* If src is categorized for a readonly section we can use
11364 normal __memcpy_chk. */
11365 if (readonly_data_expr (src))
11366 {
11367 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11368 if (!fn)
11369 return NULL_RTX;
11370 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11371 dest, src, len, size);
11372 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11373 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11374 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11375 }
11376 }
11377 return NULL_RTX;
11378 }
11379 }
11380
11381 /* Emit warning if a buffer overflow is detected at compile time. */
11382
11383 static void
11384 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11385 {
11386 int is_strlen = 0;
11387 tree len, size;
11388 location_t loc = tree_nonartificial_location (exp);
11389
11390 switch (fcode)
11391 {
11392 case BUILT_IN_STRCPY_CHK:
11393 case BUILT_IN_STPCPY_CHK:
11394 /* For __strcat_chk the warning will be emitted only if overflowing
11395 by at least strlen (dest) + 1 bytes. */
11396 case BUILT_IN_STRCAT_CHK:
11397 len = CALL_EXPR_ARG (exp, 1);
11398 size = CALL_EXPR_ARG (exp, 2);
11399 is_strlen = 1;
11400 break;
11401 case BUILT_IN_STRNCAT_CHK:
11402 case BUILT_IN_STRNCPY_CHK:
11403 case BUILT_IN_STPNCPY_CHK:
11404 len = CALL_EXPR_ARG (exp, 2);
11405 size = CALL_EXPR_ARG (exp, 3);
11406 break;
11407 case BUILT_IN_SNPRINTF_CHK:
11408 case BUILT_IN_VSNPRINTF_CHK:
11409 len = CALL_EXPR_ARG (exp, 1);
11410 size = CALL_EXPR_ARG (exp, 3);
11411 break;
11412 default:
11413 gcc_unreachable ();
11414 }
11415
11416 if (!len || !size)
11417 return;
11418
11419 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11420 return;
11421
11422 if (is_strlen)
11423 {
11424 len = c_strlen (len, 1);
11425 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11426 return;
11427 }
11428 else if (fcode == BUILT_IN_STRNCAT_CHK)
11429 {
11430 tree src = CALL_EXPR_ARG (exp, 1);
11431 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11432 return;
11433 src = c_strlen (src, 1);
11434 if (! src || ! tree_fits_uhwi_p (src))
11435 {
11436 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11437 exp, get_callee_fndecl (exp));
11438 return;
11439 }
11440 else if (tree_int_cst_lt (src, size))
11441 return;
11442 }
11443 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11444 return;
11445
11446 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11447 exp, get_callee_fndecl (exp));
11448 }
11449
11450 /* Emit warning if a buffer overflow is detected at compile time
11451 in __sprintf_chk/__vsprintf_chk calls. */
11452
11453 static void
11454 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11455 {
11456 tree size, len, fmt;
11457 const char *fmt_str;
11458 int nargs = call_expr_nargs (exp);
11459
11460 /* Verify the required arguments in the original call. */
11461
11462 if (nargs < 4)
11463 return;
11464 size = CALL_EXPR_ARG (exp, 2);
11465 fmt = CALL_EXPR_ARG (exp, 3);
11466
11467 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11468 return;
11469
11470 /* Check whether the format is a literal string constant. */
11471 fmt_str = c_getstr (fmt);
11472 if (fmt_str == NULL)
11473 return;
11474
11475 if (!init_target_chars ())
11476 return;
11477
11478 /* If the format doesn't contain % args or %%, we know its size. */
11479 if (strchr (fmt_str, target_percent) == 0)
11480 len = build_int_cstu (size_type_node, strlen (fmt_str));
11481 /* If the format is "%s" and the first ... argument is a string literal,
11482 we know it too. */
11483 else if (fcode == BUILT_IN_SPRINTF_CHK
11484 && strcmp (fmt_str, target_percent_s) == 0)
11485 {
11486 tree arg;
11487
11488 if (nargs < 5)
11489 return;
11490 arg = CALL_EXPR_ARG (exp, 4);
11491 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11492 return;
11493
11494 len = c_strlen (arg, 1);
11495 if (!len || ! tree_fits_uhwi_p (len))
11496 return;
11497 }
11498 else
11499 return;
11500
11501 if (! tree_int_cst_lt (len, size))
11502 warning_at (tree_nonartificial_location (exp),
11503 0, "%Kcall to %D will always overflow destination buffer",
11504 exp, get_callee_fndecl (exp));
11505 }
11506
11507 /* Emit warning if a free is called with address of a variable. */
11508
11509 static void
11510 maybe_emit_free_warning (tree exp)
11511 {
11512 tree arg = CALL_EXPR_ARG (exp, 0);
11513
11514 STRIP_NOPS (arg);
11515 if (TREE_CODE (arg) != ADDR_EXPR)
11516 return;
11517
11518 arg = get_base_address (TREE_OPERAND (arg, 0));
11519 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11520 return;
11521
11522 if (SSA_VAR_P (arg))
11523 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11524 "%Kattempt to free a non-heap object %qD", exp, arg);
11525 else
11526 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11527 "%Kattempt to free a non-heap object", exp);
11528 }
11529
11530 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11531 if possible. */
11532
11533 static tree
11534 fold_builtin_object_size (tree ptr, tree ost)
11535 {
11536 unsigned HOST_WIDE_INT bytes;
11537 int object_size_type;
11538
11539 if (!validate_arg (ptr, POINTER_TYPE)
11540 || !validate_arg (ost, INTEGER_TYPE))
11541 return NULL_TREE;
11542
11543 STRIP_NOPS (ost);
11544
11545 if (TREE_CODE (ost) != INTEGER_CST
11546 || tree_int_cst_sgn (ost) < 0
11547 || compare_tree_int (ost, 3) > 0)
11548 return NULL_TREE;
11549
11550 object_size_type = tree_to_shwi (ost);
11551
11552 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11553 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11554 and (size_t) 0 for types 2 and 3. */
11555 if (TREE_SIDE_EFFECTS (ptr))
11556 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11557
11558 if (TREE_CODE (ptr) == ADDR_EXPR)
11559 {
11560 bytes = compute_builtin_object_size (ptr, object_size_type);
11561 if (wi::fits_to_tree_p (bytes, size_type_node))
11562 return build_int_cstu (size_type_node, bytes);
11563 }
11564 else if (TREE_CODE (ptr) == SSA_NAME)
11565 {
11566 /* If object size is not known yet, delay folding until
11567 later. Maybe subsequent passes will help determining
11568 it. */
11569 bytes = compute_builtin_object_size (ptr, object_size_type);
11570 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11571 && wi::fits_to_tree_p (bytes, size_type_node))
11572 return build_int_cstu (size_type_node, bytes);
11573 }
11574
11575 return NULL_TREE;
11576 }
11577
11578 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11579 are the arguments to the call. */
11580
11581 static tree
11582 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
11583 tree src, tree size)
11584 {
11585 tree fn;
11586 const char *p;
11587
11588 if (!validate_arg (dest, POINTER_TYPE)
11589 || !validate_arg (src, POINTER_TYPE)
11590 || !validate_arg (size, INTEGER_TYPE))
11591 return NULL_TREE;
11592
11593 p = c_getstr (src);
11594 /* If the SRC parameter is "", return DEST. */
11595 if (p && *p == '\0')
11596 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11597
11598 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
11599 return NULL_TREE;
11600
11601 /* If __builtin_strcat_chk is used, assume strcat is available. */
11602 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
11603 if (!fn)
11604 return NULL_TREE;
11605
11606 return build_call_expr_loc (loc, fn, 2, dest, src);
11607 }
11608
11609 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11610 LEN, and SIZE. */
11611
11612 static tree
11613 fold_builtin_strncat_chk (location_t loc, tree fndecl,
11614 tree dest, tree src, tree len, tree size)
11615 {
11616 tree fn;
11617 const char *p;
11618
11619 if (!validate_arg (dest, POINTER_TYPE)
11620 || !validate_arg (src, POINTER_TYPE)
11621 || !validate_arg (len, INTEGER_TYPE)
11622 || !validate_arg (size, INTEGER_TYPE))
11623 return NULL_TREE;
11624
11625 p = c_getstr (src);
11626 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11627 if (p && *p == '\0')
11628 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11629 else if (integer_zerop (len))
11630 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11631
11632 if (! tree_fits_uhwi_p (size))
11633 return NULL_TREE;
11634
11635 if (! integer_all_onesp (size))
11636 {
11637 tree src_len = c_strlen (src, 1);
11638 if (src_len
11639 && tree_fits_uhwi_p (src_len)
11640 && tree_fits_uhwi_p (len)
11641 && ! tree_int_cst_lt (len, src_len))
11642 {
11643 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11644 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
11645 if (!fn)
11646 return NULL_TREE;
11647
11648 return build_call_expr_loc (loc, fn, 3, dest, src, size);
11649 }
11650 return NULL_TREE;
11651 }
11652
11653 /* If __builtin_strncat_chk is used, assume strncat is available. */
11654 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
11655 if (!fn)
11656 return NULL_TREE;
11657
11658 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11659 }
11660
11661 /* Builtins with folding operations that operate on "..." arguments
11662 need special handling; we need to store the arguments in a convenient
11663 data structure before attempting any folding. Fortunately there are
11664 only a few builtins that fall into this category. FNDECL is the
11665 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11666 result of the function call is ignored. */
11667
11668 static tree
11669 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11670 bool ignore ATTRIBUTE_UNUSED)
11671 {
11672 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11673 tree ret = NULL_TREE;
11674
11675 switch (fcode)
11676 {
11677 case BUILT_IN_FPCLASSIFY:
11678 ret = fold_builtin_fpclassify (loc, exp);
11679 break;
11680
11681 default:
11682 break;
11683 }
11684 if (ret)
11685 {
11686 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11687 SET_EXPR_LOCATION (ret, loc);
11688 TREE_NO_WARNING (ret) = 1;
11689 return ret;
11690 }
11691 return NULL_TREE;
11692 }
11693
11694 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11695 FMT and ARG are the arguments to the call; we don't fold cases with
11696 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11697
11698 Return NULL_TREE if no simplification was possible, otherwise return the
11699 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11700 code of the function to be simplified. */
11701
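/* For instance, printf ("x") becomes putchar ('x'),
   printf ("abc\n") becomes puts ("abc"), and printf ("%s\n", s)
   becomes puts (s).  */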
11702 static tree
11703 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11704 tree arg, bool ignore,
11705 enum built_in_function fcode)
11706 {
11707 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11708 const char *fmt_str = NULL;
11709
11710 /* If the return value is used, don't do the transformation. */
11711 if (! ignore)
11712 return NULL_TREE;
11713
11714 /* Verify the required arguments in the original call. */
11715 if (!validate_arg (fmt, POINTER_TYPE))
11716 return NULL_TREE;
11717
11718 /* Check whether the format is a literal string constant. */
11719 fmt_str = c_getstr (fmt);
11720 if (fmt_str == NULL)
11721 return NULL_TREE;
11722
11723 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11724 {
11725 /* If we're using an unlocked function, assume the other
11726 unlocked functions exist explicitly. */
11727 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11728 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11729 }
11730 else
11731 {
11732 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11733 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11734 }
11735
11736 if (!init_target_chars ())
11737 return NULL_TREE;
11738
11739 if (strcmp (fmt_str, target_percent_s) == 0
11740 || strchr (fmt_str, target_percent) == NULL)
11741 {
11742 const char *str;
11743
11744 if (strcmp (fmt_str, target_percent_s) == 0)
11745 {
11746 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11747 return NULL_TREE;
11748
11749 if (!arg || !validate_arg (arg, POINTER_TYPE))
11750 return NULL_TREE;
11751
11752 str = c_getstr (arg);
11753 if (str == NULL)
11754 return NULL_TREE;
11755 }
11756 else
11757 {
11758 /* The format specifier doesn't contain any '%' characters. */
11759 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11760 && arg)
11761 return NULL_TREE;
11762 str = fmt_str;
11763 }
11764
11765 /* If the string was "", printf does nothing. */
11766 if (str[0] == '\0')
11767 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11768
11769 /* If the string has length of 1, call putchar. */
11770 if (str[1] == '\0')
11771 {
11772 /* Given printf ("c"), where c is any single character,
11773 convert "c"[0] to an int and pass that to the replacement
11774 function. */
11775 newarg = build_int_cst (integer_type_node, str[0]);
11776 if (fn_putchar)
11777 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11778 }
11779 else
11780 {
11781 /* If the string was "string\n", call puts("string"). */
11782 size_t len = strlen (str);
11783 if ((unsigned char)str[len - 1] == target_newline
11784 && (size_t) (int) len == len
11785 && (int) len > 0)
11786 {
11787 char *newstr;
11788 tree offset_node, string_cst;
11789
11790 /* Create a NUL-terminated string that's one char shorter
11791 than the original, stripping off the trailing '\n'. */
11792 newarg = build_string_literal (len, str);
11793 string_cst = string_constant (newarg, &offset_node);
11794 gcc_checking_assert (string_cst
11795 && (TREE_STRING_LENGTH (string_cst)
11796 == (int) len)
11797 && integer_zerop (offset_node)
11798 && (unsigned char)
11799 TREE_STRING_POINTER (string_cst)[len - 1]
11800 == target_newline);
11801 /* build_string_literal creates a new STRING_CST,
11802 modify it in place to avoid double copying. */
11803 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11804 newstr[len - 1] = '\0';
11805 if (fn_puts)
11806 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11807 }
11808 else
11809 /* We'd like to arrange to call fputs(string,stdout) here,
11810 but we need stdout and don't have a way to get it yet. */
11811 return NULL_TREE;
11812 }
11813 }
11814
11815 /* The other optimizations can be done only on the non-va_list variants. */
11816 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11817 return NULL_TREE;
11818
11819 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11820 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11821 {
11822 if (!arg || !validate_arg (arg, POINTER_TYPE))
11823 return NULL_TREE;
11824 if (fn_puts)
11825 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11826 }
11827
11828 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11829 else if (strcmp (fmt_str, target_percent_c) == 0)
11830 {
11831 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11832 return NULL_TREE;
11833 if (fn_putchar)
11834 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11835 }
11836
11837 if (!call)
11838 return NULL_TREE;
11839
11840 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11841 }
11842
11843 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11844 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11845 more than 3 arguments, and ARG may be null in the 2-argument case.
11846
11847 Return NULL_TREE if no simplification was possible, otherwise return the
11848 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11849 code of the function to be simplified. */
11850
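/* For instance, fprintf (fp, "abc") becomes fputs ("abc", fp),
   fprintf (fp, "%s", s) becomes fputs (s, fp), and
   fprintf (fp, "%c", c) becomes fputc (c, fp).  */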
11851 static tree
11852 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11853 tree fmt, tree arg, bool ignore,
11854 enum built_in_function fcode)
11855 {
11856 tree fn_fputc, fn_fputs, call = NULL_TREE;
11857 const char *fmt_str = NULL;
11858
11859 /* If the return value is used, don't do the transformation. */
11860 if (! ignore)
11861 return NULL_TREE;
11862
11863 /* Verify the required arguments in the original call. */
11864 if (!validate_arg (fp, POINTER_TYPE))
11865 return NULL_TREE;
11866 if (!validate_arg (fmt, POINTER_TYPE))
11867 return NULL_TREE;
11868
11869 /* Check whether the format is a literal string constant. */
11870 fmt_str = c_getstr (fmt);
11871 if (fmt_str == NULL)
11872 return NULL_TREE;
11873
11874 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11875 {
11876 /* If we're using an unlocked function, assume the other
11877 unlocked functions exist explicitly. */
11878 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11879 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11880 }
11881 else
11882 {
11883 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11884 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11885 }
11886
11887 if (!init_target_chars ())
11888 return NULL_TREE;
11889
11890 /* If the format doesn't contain % args or %%, use strcpy. */
11891 if (strchr (fmt_str, target_percent) == NULL)
11892 {
11893 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11894 && arg)
11895 return NULL_TREE;
11896
11897 /* If the format specifier was "", fprintf does nothing. */
11898 if (fmt_str[0] == '\0')
11899 {
11900 /* If FP has side-effects, just wait until gimplification is
11901 done. */
11902 if (TREE_SIDE_EFFECTS (fp))
11903 return NULL_TREE;
11904
11905 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11906 }
11907
11908 /* When "string" doesn't contain %, replace all cases of
11909 fprintf (fp, string) with fputs (string, fp). The fputs
11910 builtin will take care of special cases like length == 1. */
11911 if (fn_fputs)
11912 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11913 }
11914
11915 /* The other optimizations can be done only on the non-va_list variants. */
11916 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11917 return NULL_TREE;
11918
11919 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11920 else if (strcmp (fmt_str, target_percent_s) == 0)
11921 {
11922 if (!arg || !validate_arg (arg, POINTER_TYPE))
11923 return NULL_TREE;
11924 if (fn_fputs)
11925 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11926 }
11927
11928 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11929 else if (strcmp (fmt_str, target_percent_c) == 0)
11930 {
11931 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11932 return NULL_TREE;
11933 if (fn_fputc)
11934 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11935 }
11936
11937 if (!call)
11938 return NULL_TREE;
11939 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11940 }
11941
11942 /* Initialize format string characters in the target charset. */
11943
11944 bool
11945 init_target_chars (void)
11946 {
11947 static bool init;
11948 if (!init)
11949 {
11950 target_newline = lang_hooks.to_target_charset ('\n');
11951 target_percent = lang_hooks.to_target_charset ('%');
11952 target_c = lang_hooks.to_target_charset ('c');
11953 target_s = lang_hooks.to_target_charset ('s');
11954 if (target_newline == 0 || target_percent == 0 || target_c == 0
11955 || target_s == 0)
11956 return false;
11957
11958 target_percent_c[0] = target_percent;
11959 target_percent_c[1] = target_c;
11960 target_percent_c[2] = '\0';
11961
11962 target_percent_s[0] = target_percent;
11963 target_percent_s[1] = target_s;
11964 target_percent_s[2] = '\0';
11965
11966 target_percent_s_newline[0] = target_percent;
11967 target_percent_s_newline[1] = target_s;
11968 target_percent_s_newline[2] = target_newline;
11969 target_percent_s_newline[3] = '\0';
11970
11971 init = true;
11972 }
11973 return true;
11974 }
11975
11976 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11977 and no overflow/underflow occurred. INEXACT is true if M was not
11978 exactly calculated. TYPE is the tree type for the result. This
11979 function assumes that you cleared the MPFR flags and then
11980 calculated M to see if anything subsequently set a flag prior to
11981 entering this function. Return NULL_TREE if any checks fail. */
11982
11983 static tree
11984 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11985 {
11986 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11987 overflow/underflow occurred. If -frounding-math, proceed iff the
11988 result of calling FUNC was exact. */
11989 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11990 && (!flag_rounding_math || !inexact))
11991 {
11992 REAL_VALUE_TYPE rr;
11993
11994 real_from_mpfr (&rr, m, type, GMP_RNDN);
11995 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11996 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11997 but the mpfr_t is not, then we underflowed in the
11998 conversion. */
11999 if (real_isfinite (&rr)
12000 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12001 {
12002 REAL_VALUE_TYPE rmode;
12003
12004 real_convert (&rmode, TYPE_MODE (type), &rr);
12005 /* Proceed iff the specified mode can hold the value. */
12006 if (real_identical (&rmode, &rr))
12007 return build_real (type, rmode);
12008 }
12009 }
12010 return NULL_TREE;
12011 }
12012
12013 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12014 number and no overflow/underflow occurred. INEXACT is true if M
12015 was not exactly calculated. TYPE is the tree type for the result.
12016 This function assumes that you cleared the MPFR flags and then
12017 calculated M to see if anything subsequently set a flag prior to
12018 entering this function. Return NULL_TREE if any checks fail; if
12019 FORCE_CONVERT is true, the checks are bypassed. */
12020
12021 static tree
12022 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12023 {
12024 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12025 overflow/underflow occurred. If -frounding-math, proceed iff the
12026 result of calling FUNC was exact. */
12027 if (force_convert
12028 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12029 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12030 && (!flag_rounding_math || !inexact)))
12031 {
12032 REAL_VALUE_TYPE re, im;
12033
12034 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12035 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12036 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12037 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12038 but the mpfr_t is not, then we underflowed in the
12039 conversion. */
12040 if (force_convert
12041 || (real_isfinite (&re) && real_isfinite (&im)
12042 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12043 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12044 {
12045 REAL_VALUE_TYPE re_mode, im_mode;
12046
12047 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12048 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12049 /* Proceed iff the specified mode can hold the value. */
12050 if (force_convert
12051 || (real_identical (&re_mode, &re)
12052 && real_identical (&im_mode, &im)))
12053 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12054 build_real (TREE_TYPE (type), im_mode));
12055 }
12056 }
12057 return NULL_TREE;
12058 }
12059
12060 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12061 FUNC on it and return the resulting value as a tree with type TYPE.
12062 If MIN and/or MAX are not NULL, then the supplied ARG must be
12063 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12064 acceptable values, otherwise they are not. The mpfr precision is
12065 set to the precision of TYPE. We assume that function FUNC returns
12066 zero if the result could be calculated exactly within the requested
12067 precision. */
12068
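/* For instance, a caller can pass mpfr_sin here to fold sin of a
   constant argument at compile time; the MIN/MAX bounds serve
   functions such as asin whose domain is restricted.  */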
12069 static tree
12070 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12071 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12072 bool inclusive)
12073 {
12074 tree result = NULL_TREE;
12075
12076 STRIP_NOPS (arg);
12077
12078 /* To proceed, MPFR must exactly represent the target floating point
12079 format, which only happens when the target base equals two. */
12080 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12081 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12082 {
12083 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12084
12085 if (real_isfinite (ra)
12086 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12087 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12088 {
12089 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12090 const int prec = fmt->p;
12091 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12092 int inexact;
12093 mpfr_t m;
12094
12095 mpfr_init2 (m, prec);
12096 mpfr_from_real (m, ra, GMP_RNDN);
12097 mpfr_clear_flags ();
12098 inexact = func (m, m, rnd);
12099 result = do_mpfr_ckconv (m, type, inexact);
12100 mpfr_clear (m);
12101 }
12102 }
12103
12104 return result;
12105 }
12106
12107 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12108 FUNC on it and return the resulting value as a tree with type TYPE.
12109 The mpfr precision is set to the precision of TYPE. We assume that
12110 function FUNC returns zero if the result could be calculated
12111 exactly within the requested precision. */
12112
12113 static tree
12114 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12115 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12116 {
12117 tree result = NULL_TREE;
12118
12119 STRIP_NOPS (arg1);
12120 STRIP_NOPS (arg2);
12121
12122 /* To proceed, MPFR must exactly represent the target floating point
12123 format, which only happens when the target base equals two. */
12124 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12125 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12126 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12127 {
12128 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12129 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12130
12131 if (real_isfinite (ra1) && real_isfinite (ra2))
12132 {
12133 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12134 const int prec = fmt->p;
12135 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12136 int inexact;
12137 mpfr_t m1, m2;
12138
12139 mpfr_inits2 (prec, m1, m2, NULL);
12140 mpfr_from_real (m1, ra1, GMP_RNDN);
12141 mpfr_from_real (m2, ra2, GMP_RNDN);
12142 mpfr_clear_flags ();
12143 inexact = func (m1, m1, m2, rnd);
12144 result = do_mpfr_ckconv (m1, type, inexact);
12145 mpfr_clears (m1, m2, NULL);
12146 }
12147 }
12148
12149 return result;
12150 }
12151
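/* For example (illustrative; mpfr_atan2 and mpfr_pow both match the
   FUNC signature above):

     res = do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
     res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow);  */
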
12152 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
12153 mpfr function FUNC on them and return the result as a tree with type TYPE.
12154 The mpfr precision is set to the precision of TYPE. We assume that
12155 function FUNC returns zero if the result could be calculated
12156 exactly within the requested precision. */
12157
12158 static tree
12159 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12160 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12161 {
12162 tree result = NULL_TREE;
12163
12164 STRIP_NOPS (arg1);
12165 STRIP_NOPS (arg2);
12166 STRIP_NOPS (arg3);
12167
12168 /* To proceed, MPFR must exactly represent the target floating point
12169 format, which only happens when the target base equals two. */
12170 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12171 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12172 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12173 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12174 {
12175 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12176 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12177 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12178
12179 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12180 {
12181 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12182 const int prec = fmt->p;
12183 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12184 int inexact;
12185 mpfr_t m1, m2, m3;
12186
12187 mpfr_inits2 (prec, m1, m2, m3, NULL);
12188 mpfr_from_real (m1, ra1, GMP_RNDN);
12189 mpfr_from_real (m2, ra2, GMP_RNDN);
12190 mpfr_from_real (m3, ra3, GMP_RNDN);
12191 mpfr_clear_flags ();
12192 inexact = func (m1, m1, m2, m3, rnd);
12193 result = do_mpfr_ckconv (m1, type, inexact);
12194 mpfr_clears (m1, m2, m3, NULL);
12195 }
12196 }
12197
12198 return result;
12199 }
12200
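/* For example, folding a constant fma call can be sketched as
   (illustrative; mpfr_fma takes three source operands plus the
   rounding mode, matching FUNC):

     res = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */
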
12201 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12202 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12203 If ARG_SINP and ARG_COSP are NULL then the result is returned
12204 as a complex value.
12205 The type is taken from the type of ARG and is used for setting the
12206 precision of the calculation and results. */
12207
12208 static tree
12209 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12210 {
12211 tree const type = TREE_TYPE (arg);
12212 tree result = NULL_TREE;
12213
12214 STRIP_NOPS (arg);
12215
12216 /* To proceed, MPFR must exactly represent the target floating point
12217 format, which only happens when the target base equals two. */
12218 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12219 && TREE_CODE (arg) == REAL_CST
12220 && !TREE_OVERFLOW (arg))
12221 {
12222 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12223
12224 if (real_isfinite (ra))
12225 {
12226 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12227 const int prec = fmt->p;
12228 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12229 tree result_s, result_c;
12230 int inexact;
12231 mpfr_t m, ms, mc;
12232
12233 mpfr_inits2 (prec, m, ms, mc, NULL);
12234 mpfr_from_real (m, ra, GMP_RNDN);
12235 mpfr_clear_flags ();
12236 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12237 result_s = do_mpfr_ckconv (ms, type, inexact);
12238 result_c = do_mpfr_ckconv (mc, type, inexact);
12239 mpfr_clears (m, ms, mc, NULL);
12240 if (result_s && result_c)
12241 {
12242 /* If we are supposed to return the result in a complex value, do so. */
12243 if (!arg_sinp && !arg_cosp)
12244 return build_complex (build_complex_type (type),
12245 result_c, result_s);
12246
12247 /* Dereference the sin/cos pointer arguments. */
12248 arg_sinp = build_fold_indirect_ref (arg_sinp);
12249 arg_cosp = build_fold_indirect_ref (arg_cosp);
12250 /* Proceed iff valid pointer types were passed in. */
12251 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12252 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12253 {
12254 /* Set the values. */
12255 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12256 result_s);
12257 TREE_SIDE_EFFECTS (result_s) = 1;
12258 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12259 result_c);
12260 TREE_SIDE_EFFECTS (result_c) = 1;
12261 /* Combine the assignments into a compound expr. */
12262 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12263 result_s, result_c));
12264 }
12265 }
12266 }
12267 }
12268 return result;
12269 }
12270
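/* Illustrative uses: a sincos-style fold passes the user's sin/cos
   pointer arguments through, while passing NULL pointers requests the
   complex COS + i*SIN form (as when folding cexpi):

     res = do_mpfr_sincos (arg, arg_sinp, arg_cosp);
     res = do_mpfr_sincos (arg, NULL, NULL);  */
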
12271 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12272 two-argument mpfr order N Bessel function FUNC on them and return
12273 the resulting value as a tree with type TYPE. The mpfr precision
12274 is set to the precision of TYPE. We assume that function FUNC
12275 returns zero if the result could be calculated exactly within the
12276 requested precision. */
12277 static tree
12278 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12279 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12280 const REAL_VALUE_TYPE *min, bool inclusive)
12281 {
12282 tree result = NULL_TREE;
12283
12284 STRIP_NOPS (arg1);
12285 STRIP_NOPS (arg2);
12286
12287 /* To proceed, MPFR must exactly represent the target floating point
12288 format, which only happens when the target base equals two. */
12289 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12290 && tree_fits_shwi_p (arg1)
12291 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12292 {
12293 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12294 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12295
12296 if (n == (long)n
12297 && real_isfinite (ra)
12298 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12299 {
12300 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12301 const int prec = fmt->p;
12302 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12303 int inexact;
12304 mpfr_t m;
12305
12306 mpfr_init2 (m, prec);
12307 mpfr_from_real (m, ra, GMP_RNDN);
12308 mpfr_clear_flags ();
12309 inexact = func (m, n, m, rnd);
12310 result = do_mpfr_ckconv (m, type, inexact);
12311 mpfr_clear (m);
12312 }
12313 }
12314
12315 return result;
12316 }
12317
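/* For example (illustrative; mpfr_jn and mpfr_yn take the order as a
   long, matching FUNC, and dconst0 is GCC's REAL_VALUE_TYPE zero):

     res = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     res = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   yn is given an exclusive lower bound of zero since it is undefined
   for non-positive arguments.  */
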
12318 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12319 the pointer *(ARG_QUO) and return the result. The type is taken
12320 from the type of ARG0 and is used for setting the precision of the
12321 calculation and results. */
12322
12323 static tree
12324 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12325 {
12326 tree const type = TREE_TYPE (arg0);
12327 tree result = NULL_TREE;
12328
12329 STRIP_NOPS (arg0);
12330 STRIP_NOPS (arg1);
12331
12332 /* To proceed, MPFR must exactly represent the target floating point
12333 format, which only happens when the target base equals two. */
12334 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12335 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12336 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12337 {
12338 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12339 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12340
12341 if (real_isfinite (ra0) && real_isfinite (ra1))
12342 {
12343 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12344 const int prec = fmt->p;
12345 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12346 tree result_rem;
12347 long integer_quo;
12348 mpfr_t m0, m1;
12349
12350 mpfr_inits2 (prec, m0, m1, NULL);
12351 mpfr_from_real (m0, ra0, GMP_RNDN);
12352 mpfr_from_real (m1, ra1, GMP_RNDN);
12353 mpfr_clear_flags ();
12354 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12355 /* Remquo is independent of the rounding mode, so pass
12356 inexact=0 to do_mpfr_ckconv(). */
12357 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12358 mpfr_clears (m0, m1, NULL);
12359 if (result_rem)
12360 {
12361 /* MPFR calculates quo in the host's long so it may
12362 return more bits in quo than the target int can hold
12363 if sizeof(host long) > sizeof(target int). This can
12364 happen even for native compilers in LP64 mode. In
12365 these cases, reduce the quo value modulo the largest
12366 number that the target int can hold, leaving one
12367 bit for the sign. */
12368 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12369 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12370
12371 /* Dereference the quo pointer argument. */
12372 arg_quo = build_fold_indirect_ref (arg_quo);
12373 /* Proceed iff a valid pointer type was passed in. */
12374 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12375 {
12376 /* Set the value. */
12377 tree result_quo
12378 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12379 build_int_cst (TREE_TYPE (arg_quo),
12380 integer_quo));
12381 TREE_SIDE_EFFECTS (result_quo) = 1;
12382 /* Combine the quo assignment with the rem. */
12383 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12384 result_quo, result_rem));
12385 }
12386 }
12387 }
12388 }
12389 return result;
12390 }
12391
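/* Folding a constant call remquo (x, y, &q) can then be sketched as
   (illustrative):

     res = do_mpfr_remquo (arg0, arg1, arg2);

   where ARG2 is the int * quotient pointer of the user call; a
   non-NULL result is a COMPOUND_EXPR that stores the quotient and
   yields the remainder.  */
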
12392 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12393 resulting value as a tree with type TYPE. The mpfr precision is
12394 set to the precision of TYPE. We assume that this mpfr function
12395 returns zero if the result could be calculated exactly within the
12396 requested precision. In addition, the integer pointer represented
12397 by ARG_SG will be dereferenced and set to the appropriate signgam
12398 (-1,1) value. */
12399
12400 static tree
12401 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12402 {
12403 tree result = NULL_TREE;
12404
12405 STRIP_NOPS (arg);
12406
12407 /* To proceed, MPFR must exactly represent the target floating point
12408 format, which only happens when the target base equals two. Also
12409 verify ARG is a constant and that ARG_SG is an int pointer. */
12410 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12411 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12412 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12413 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12414 {
12415 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12416
12417 /* In addition to not being NaN or Inf, the argument must not be
12418 zero or a negative integer, where lgamma has poles. */
12419 if (real_isfinite (ra)
12420 && ra->cl != rvc_zero
12421 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12422 {
12423 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12424 const int prec = fmt->p;
12425 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12426 int inexact, sg;
12427 mpfr_t m;
12428 tree result_lg;
12429
12430 mpfr_init2 (m, prec);
12431 mpfr_from_real (m, ra, GMP_RNDN);
12432 mpfr_clear_flags ();
12433 inexact = mpfr_lgamma (m, &sg, m, rnd);
12434 result_lg = do_mpfr_ckconv (m, type, inexact);
12435 mpfr_clear (m);
12436 if (result_lg)
12437 {
12438 tree result_sg;
12439
12440 /* Dereference the arg_sg pointer argument. */
12441 arg_sg = build_fold_indirect_ref (arg_sg);
12442 /* Assign the signgam value into *arg_sg. */
12443 result_sg = fold_build2 (MODIFY_EXPR,
12444 TREE_TYPE (arg_sg), arg_sg,
12445 build_int_cst (TREE_TYPE (arg_sg), sg));
12446 TREE_SIDE_EFFECTS (result_sg) = 1;
12447 /* Combine the signgam assignment with the lgamma result. */
12448 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12449 result_sg, result_lg));
12450 }
12451 }
12452 }
12453
12454 return result;
12455 }
12456
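/* Illustrative use, folding a constant call lgamma_r (x, &sg):

     res = do_mpfr_lgamma_r (arg0, arg1, type);

   A non-NULL result is a COMPOUND_EXPR that stores the sign of
   Gamma (x) through the pointer and yields lgamma (x).  */
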
12457 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12458 function FUNC on it and return the resulting value as a tree with
12459 type TYPE. The mpfr precision is set to the precision of TYPE. We
12460 assume that function FUNC returns zero if the result could be
12461 calculated exactly within the requested precision. */
12462
12463 static tree
12464 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12465 {
12466 tree result = NULL_TREE;
12467
12468 STRIP_NOPS (arg);
12469
12470 /* To proceed, MPFR must exactly represent the target floating point
12471 format, which only happens when the target base equals two. */
12472 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12473 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12474 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12475 {
12476 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12477 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12478
12479 if (real_isfinite (re) && real_isfinite (im))
12480 {
12481 const struct real_format *const fmt =
12482 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12483 const int prec = fmt->p;
12484 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12485 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12486 int inexact;
12487 mpc_t m;
12488
12489 mpc_init2 (m, prec);
12490 mpfr_from_real (mpc_realref (m), re, rnd);
12491 mpfr_from_real (mpc_imagref (m), im, rnd);
12492 mpfr_clear_flags ();
12493 inexact = func (m, m, crnd);
12494 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12495 mpc_clear (m);
12496 }
12497 }
12498
12499 return result;
12500 }
12501
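/* For example (illustrative; one-operand mpc functions such as
   mpc_sin, mpc_cos and mpc_sqrt all match the FUNC signature above):

     res = do_mpc_arg1 (arg, type, mpc_sin);  */
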
12502 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12503 mpc function FUNC on them and return the resulting value as a tree
12504 with type TYPE. The mpfr precision is set to the precision of
12505 TYPE. We assume that function FUNC returns zero if the result
12506 could be calculated exactly within the requested precision. If
12507 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12508 in the arguments and/or results. */
12509
12510 tree
12511 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12512 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12513 {
12514 tree result = NULL_TREE;
12515
12516 STRIP_NOPS (arg0);
12517 STRIP_NOPS (arg1);
12518
12519 /* To proceed, MPFR must exactly represent the target floating point
12520 format, which only happens when the target base equals two. */
12521 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12522 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12523 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12524 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12525 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12526 {
12527 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12528 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12529 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12530 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12531
12532 if (do_nonfinite
12533 || (real_isfinite (re0) && real_isfinite (im0)
12534 && real_isfinite (re1) && real_isfinite (im1)))
12535 {
12536 const struct real_format *const fmt =
12537 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12538 const int prec = fmt->p;
12539 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12540 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12541 int inexact;
12542 mpc_t m0, m1;
12543
12544 mpc_init2 (m0, prec);
12545 mpc_init2 (m1, prec);
12546 mpfr_from_real (mpc_realref (m0), re0, rnd);
12547 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12548 mpfr_from_real (mpc_realref (m1), re1, rnd);
12549 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12550 mpfr_clear_flags ();
12551 inexact = func (m0, m0, m1, crnd);
12552 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12553 mpc_clear (m0);
12554 mpc_clear (m1);
12555 }
12556 }
12557
12558 return result;
12559 }
12560
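/* For example, folding a constant cpow call can be sketched as
   (illustrative; mpc_pow matches the FUNC signature above):

     res = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   Passing a nonzero DO_NONFINITE instead folds even Inf/NaN operands,
   as described in the comment above.  */
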
12561 /* A wrapper function for builtin folding that prevents warnings for
12562 "statement without effect" and the like, caused by removing the
12563 call node earlier than the warning is generated. */
12564
12565 tree
12566 fold_call_stmt (gimple stmt, bool ignore)
12567 {
12568 tree ret = NULL_TREE;
12569 tree fndecl = gimple_call_fndecl (stmt);
12570 location_t loc = gimple_location (stmt);
12571 if (fndecl
12572 && TREE_CODE (fndecl) == FUNCTION_DECL
12573 && DECL_BUILT_IN (fndecl)
12574 && !gimple_call_va_arg_pack_p (stmt))
12575 {
12576 int nargs = gimple_call_num_args (stmt);
12577 tree *args = (nargs > 0
12578 ? gimple_call_arg_ptr (stmt, 0)
12579 : &error_mark_node);
12580
12581 if (avoid_folding_inline_builtin (fndecl))
12582 return NULL_TREE;
12583 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12584 {
12585 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12586 }
12587 else
12588 {
12589 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
12590 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12591 if (ret)
12592 {
12593 /* Propagate location information from original call to
12594 expansion of builtin. Otherwise things like
12595 maybe_emit_chk_warning, that operate on the expansion
12596 of a builtin, will use the wrong location information. */
12597 if (gimple_has_location (stmt))
12598 {
12599 tree realret = ret;
12600 if (TREE_CODE (ret) == NOP_EXPR)
12601 realret = TREE_OPERAND (ret, 0);
12602 if (CAN_HAVE_LOCATION_P (realret)
12603 && !EXPR_HAS_LOCATION (realret))
12604 SET_EXPR_LOCATION (realret, loc);
12605 return realret;
12606 }
12607 return ret;
12608 }
12609 }
12610 }
12611 return NULL_TREE;
12612 }
12613
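/* A folding pass might invoke this roughly as follows (illustrative;
   the exact IGNORE policy is the caller's choice):

     tree res = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);

   i.e. IGNORE is true when the call's value is unused, permitting
   folds that need only preserve side effects.  */
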
12614 /* Look up the explicit builtin declaration that corresponds to DECL
12615 and set ASMSPEC as its user assembler name. DECL must be a
12616 function decl that declares a builtin. */
12617
12618 void
12619 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12620 {
12621 tree builtin;
12622 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12623 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12624 && asmspec != 0);
12625
12626 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12627 set_user_assembler_name (builtin, asmspec);
12628 switch (DECL_FUNCTION_CODE (decl))
12629 {
12630 case BUILT_IN_MEMCPY:
12631 init_block_move_fn (asmspec);
12632 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12633 break;
12634 case BUILT_IN_MEMSET:
12635 init_block_clear_fn (asmspec);
12636 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12637 break;
12638 case BUILT_IN_MEMMOVE:
12639 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12640 break;
12641 case BUILT_IN_MEMCMP:
12642 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12643 break;
12644 case BUILT_IN_ABORT:
12645 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12646 break;
12647 case BUILT_IN_FFS:
12648 if (INT_TYPE_SIZE < BITS_PER_WORD)
12649 {
12650 set_user_assembler_libfunc ("ffs", asmspec);
12651 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12652 MODE_INT, 0), "ffs");
12653 }
12654 break;
12655 default:
12656 break;
12657 }
12658 }
12659
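/* This handles redirections originating from user code such as
   (illustrative):

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   ensuring that block moves GCC emits on its own behalf also call
   my_memcpy rather than memcpy.  */
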
12660 /* Return true if DECL is a builtin that expands to a constant or similarly
12661 simple code. */
12662 bool
12663 is_simple_builtin (tree decl)
12664 {
12665 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12666 switch (DECL_FUNCTION_CODE (decl))
12667 {
12668 /* Builtins that expand to constants. */
12669 case BUILT_IN_CONSTANT_P:
12670 case BUILT_IN_EXPECT:
12671 case BUILT_IN_OBJECT_SIZE:
12672 case BUILT_IN_UNREACHABLE:
12673 /* Simple register moves or loads from stack. */
12674 case BUILT_IN_ASSUME_ALIGNED:
12675 case BUILT_IN_RETURN_ADDRESS:
12676 case BUILT_IN_EXTRACT_RETURN_ADDR:
12677 case BUILT_IN_FROB_RETURN_ADDR:
12678 case BUILT_IN_RETURN:
12679 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12680 case BUILT_IN_FRAME_ADDRESS:
12681 case BUILT_IN_VA_END:
12682 case BUILT_IN_STACK_SAVE:
12683 case BUILT_IN_STACK_RESTORE:
12684 /* Exception state returns or moves registers around. */
12685 case BUILT_IN_EH_FILTER:
12686 case BUILT_IN_EH_POINTER:
12687 case BUILT_IN_EH_COPY_VALUES:
12688 return true;
12689
12690 default:
12691 return false;
12692 }
12693
12694 return false;
12695 }
12696
12697 /* Return true if DECL is a builtin that is not expensive, i.e., one
12698 most probably expanded inline into reasonably simple code. This is a
12699 superset of is_simple_builtin. */
12700 bool
12701 is_inexpensive_builtin (tree decl)
12702 {
12703 if (!decl)
12704 return false;
12705 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12706 return true;
12707 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12708 switch (DECL_FUNCTION_CODE (decl))
12709 {
12710 case BUILT_IN_ABS:
12711 case BUILT_IN_ALLOCA:
12712 case BUILT_IN_ALLOCA_WITH_ALIGN:
12713 case BUILT_IN_BSWAP16:
12714 case BUILT_IN_BSWAP32:
12715 case BUILT_IN_BSWAP64:
12716 case BUILT_IN_CLZ:
12717 case BUILT_IN_CLZIMAX:
12718 case BUILT_IN_CLZL:
12719 case BUILT_IN_CLZLL:
12720 case BUILT_IN_CTZ:
12721 case BUILT_IN_CTZIMAX:
12722 case BUILT_IN_CTZL:
12723 case BUILT_IN_CTZLL:
12724 case BUILT_IN_FFS:
12725 case BUILT_IN_FFSIMAX:
12726 case BUILT_IN_FFSL:
12727 case BUILT_IN_FFSLL:
12728 case BUILT_IN_IMAXABS:
12729 case BUILT_IN_FINITE:
12730 case BUILT_IN_FINITEF:
12731 case BUILT_IN_FINITEL:
12732 case BUILT_IN_FINITED32:
12733 case BUILT_IN_FINITED64:
12734 case BUILT_IN_FINITED128:
12735 case BUILT_IN_FPCLASSIFY:
12736 case BUILT_IN_ISFINITE:
12737 case BUILT_IN_ISINF_SIGN:
12738 case BUILT_IN_ISINF:
12739 case BUILT_IN_ISINFF:
12740 case BUILT_IN_ISINFL:
12741 case BUILT_IN_ISINFD32:
12742 case BUILT_IN_ISINFD64:
12743 case BUILT_IN_ISINFD128:
12744 case BUILT_IN_ISNAN:
12745 case BUILT_IN_ISNANF:
12746 case BUILT_IN_ISNANL:
12747 case BUILT_IN_ISNAND32:
12748 case BUILT_IN_ISNAND64:
12749 case BUILT_IN_ISNAND128:
12750 case BUILT_IN_ISNORMAL:
12751 case BUILT_IN_ISGREATER:
12752 case BUILT_IN_ISGREATEREQUAL:
12753 case BUILT_IN_ISLESS:
12754 case BUILT_IN_ISLESSEQUAL:
12755 case BUILT_IN_ISLESSGREATER:
12756 case BUILT_IN_ISUNORDERED:
12757 case BUILT_IN_VA_ARG_PACK:
12758 case BUILT_IN_VA_ARG_PACK_LEN:
12759 case BUILT_IN_VA_COPY:
12760 case BUILT_IN_TRAP:
12761 case BUILT_IN_SAVEREGS:
12762 case BUILT_IN_POPCOUNTL:
12763 case BUILT_IN_POPCOUNTLL:
12764 case BUILT_IN_POPCOUNTIMAX:
12765 case BUILT_IN_POPCOUNT:
12766 case BUILT_IN_PARITYL:
12767 case BUILT_IN_PARITYLL:
12768 case BUILT_IN_PARITYIMAX:
12769 case BUILT_IN_PARITY:
12770 case BUILT_IN_LABS:
12771 case BUILT_IN_LLABS:
12772 case BUILT_IN_PREFETCH:
12773 return true;
12774
12775 default:
12776 return is_simple_builtin (decl);
12777 }
12778
12779 return false;
12780 }