/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or names one of the Cilk Plus runtime helpers when -fcilkplus is
   enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
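
/* A worked example of the arithmetic above (an illustrative sketch, not
   tied to any particular target): if get_object_alignment_1 establishes
   align == 128 and bitpos == 48, the address is known to be 6 bytes past
   a 16-byte boundary.  The largest power of two dividing the bit offset
   is 48 & -48 == 16, so get_object_alignment returns 16 bits, i.e. a
   guaranteed 2-byte alignment.  */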

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
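
/* Illustrative examples of the cases above (hypothetical inputs, not
   code in this file): for SRC == "hello" + 2 with a constant offset,
   the result is ssize_int (3), the length of "llo".  For a variable
   offset N into "foobar", which has no embedded zero byte, the result
   is the expression 6 - N.  For "foo\0bar" with a variable offset,
   NULL_TREE is returned, since the first zero byte cannot be located
   statically.  */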

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
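
/* A concrete sketch of the byte placement above: reading "abcd" in a
   32-bit integer mode puts 'a' (0x61) in the lowest-addressed byte, so
   a little-endian target yields the constant 0x64636261 while a
   big-endian target yields 0x61626364.  Once a zero byte is seen, CH
   stays 0 and the remaining positions are filled with zeros, mirroring
   how the bytes would sit in target memory.  */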

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
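
/* At the source level this expands calls such as the following sketch
   (illustrative only):

     void *ra = __builtin_return_address (0);  // our own return address
     void *fp = __builtin_frame_address (1);   // the caller's frame

   Nonzero counts walk the dynamic chain in the loop above, so their
   results are only as reliable as the target's frame chain.  */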

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
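
/* The resulting buffer layout, in Pmode-sized words (a sketch inferred
   from the offsets used above):

     word 0:  frame value (targetm.builtin_setjmp_frame_value)
     word 1:  address of the receiver label
     word 2+: stack save area (SAVE_NONLOCAL, machine-dependent size)

   expand_builtin_longjmp below reads the buffer back with the same
   offsets.  */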

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
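
/* A source-level sketch of the expected usage (illustrative only; these
   builtins are meant for internal exception handling use):

     static void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // second argument must be 1

   The gcc_assert above enforces the constant-1 second argument.  */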

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
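
/* For example (both forms appear throughout this file):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   checks for exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   checks that the first argument is a pointer and accepts anything
   after it.  */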

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
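
/* A typical source-level use (illustrative only):

     __builtin_prefetch (&a[i + 16], 0, 3);   // prefetch for reading

   is equivalent to __builtin_prefetch (&a[i + 16]), since the
   read/write flag defaults to 0 and the locality hint to 3; both hints
   must be compile-time constants, as enforced above.  */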

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
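
/* The block is therefore laid out as follows (a sketch of the logic
   above):

     [incoming arg pointer]
     [structure value address, if not passed as an invisible argument]
     [one slot per argument register, each aligned to its mode]

   apply_args_mode[] records the mode chosen for each register so that
   expand_builtin_apply_args_1 and expand_builtin_apply walk the block
   with identical offsets.  */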

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
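
/* At the source level the untyped call machinery is used roughly like
   this sketch of a call forwarder (illustrative only; the 64 is an
   arbitrary upper bound on the argument block size):

     void target_function (void);

     void forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) (void)) target_function,
                                       args, 64);
       __builtin_return (result);
     }
*/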

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                         incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
        dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
        dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
                                                result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* have_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_insn (targetm.gen_call_value (valreg,
                                         gen_rtx_MEM (FUNCTION_MODE, function),
                                         const0_rtx, NULL_RTX, const0_rtx));
1699
1700 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1701 }
1702 else
1703 gcc_unreachable ();
1704
1705 /* Find the CALL insn we just emitted, and attach the register usage
1706 information. */
1707 call_insn = last_call_insn ();
1708 add_function_usage_to (call_insn, call_fusage);
1709
1710 /* Restore the stack. */
1711 if (targetm.have_save_stack_nonlocal ())
1712 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1713 else
1714 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1715 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1716
1717 OK_DEFER_POP;
1718
1719 /* Return the address of the result block. */
1720 result = copy_addr_to_reg (XEXP (result, 0));
1721 return convert_memory_address (ptr_mode, result);
1722 }
1723
1724 /* Perform an untyped return. */
1725
1726 static void
1727 expand_builtin_return (rtx result)
1728 {
1729 int size, align, regno;
1730 machine_mode mode;
1731 rtx reg;
1732 rtx_insn *call_fusage = 0;
1733
1734 result = convert_memory_address (Pmode, result);
1735
1736 apply_result_size ();
1737 result = gen_rtx_MEM (BLKmode, result);
1738
1739 if (targetm.have_untyped_return ())
1740 {
1741 rtx vector = result_vector (0, result);
1742 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1743 emit_barrier ();
1744 return;
1745 }
1746
1747 /* Restore the return value and note that each value is used. */
1748 size = 0;
1749 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1750 if ((mode = apply_result_mode[regno]) != VOIDmode)
1751 {
1752 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1753 if (size % align != 0)
1754 size = CEIL (size, align) * align;
1755 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1756 emit_move_insn (reg, adjust_address (result, mode, size));
1757
1758 push_to_sequence (call_fusage);
1759 emit_use (reg);
1760 call_fusage = get_insns ();
1761 end_sequence ();
1762 size += GET_MODE_SIZE (mode);
1763 }
1764
1765 /* Put the USE insns before the return. */
1766 emit_insn (call_fusage);
1767
1768	  /* Return whatever value was restored by jumping directly to the end
1769	     of the function.  */
1770 expand_naked_return ();
1771 }
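
/* For illustration only: a hedged sketch of the source-level idiom that
   expand_builtin_apply_args, expand_builtin_apply and expand_builtin_return
   implement together.  TARGET_FN and the argument-block size 64 are
   assumptions for the example, not values this file uses:

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }
*/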
1772
1773 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1774
1775 static enum type_class
1776 type_to_class (tree type)
1777 {
1778 switch (TREE_CODE (type))
1779 {
1780 case VOID_TYPE: return void_type_class;
1781 case INTEGER_TYPE: return integer_type_class;
1782 case ENUMERAL_TYPE: return enumeral_type_class;
1783 case BOOLEAN_TYPE: return boolean_type_class;
1784 case POINTER_TYPE: return pointer_type_class;
1785 case REFERENCE_TYPE: return reference_type_class;
1786 case OFFSET_TYPE: return offset_type_class;
1787 case REAL_TYPE: return real_type_class;
1788 case COMPLEX_TYPE: return complex_type_class;
1789 case FUNCTION_TYPE: return function_type_class;
1790 case METHOD_TYPE: return method_type_class;
1791 case RECORD_TYPE: return record_type_class;
1792 case UNION_TYPE:
1793 case QUAL_UNION_TYPE: return union_type_class;
1794 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1795 ? string_type_class : array_type_class);
1796 case LANG_TYPE: return lang_type_class;
1797 default: return no_type_class;
1798 }
1799 }
1800
1801 /* Expand a call EXP to __builtin_classify_type. */
1802
1803 static rtx
1804 expand_builtin_classify_type (tree exp)
1805 {
1806 if (call_expr_nargs (exp))
1807 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1808 return GEN_INT (no_type_class);
1809 }
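
/* A hedged usage sketch, not code from this file: given the mapping in
   type_to_class, __builtin_classify_type folds as in

     int i;     __builtin_classify_type (i) == integer_type_class
     double d;  __builtin_classify_type (d) == real_type_class
     char *p;   __builtin_classify_type (p) == pointer_type_class

   where the enumerators come from typeclass.h.  */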
1810
1811 /* This helper macro, meant to be used in mathfn_built_in below,
1812 determines which among a set of three builtin math functions is
1813 appropriate for a given type mode. The `F' and `L' cases are
1814 automatically generated from the `double' case. */
1815 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1816 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1817 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1818 fcodel = BUILT_IN_MATHFN##L ; break;
1819 /* Similar to above, but appends _R after any F/L suffix. */
1820 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1821 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1822 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1823 fcodel = BUILT_IN_MATHFN##L_R ; break;
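
/* For reference, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one macro line covers the double, float and long double variants.  */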
1824
1825	/* Return the mathematical function equivalent to FN but operating directly
1826	   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
1827	   declaration, otherwise use the explicit declaration.  If we can't do the
1828	   conversion, return zero.  */
1829
1830 static tree
1831 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1832 {
1833 enum built_in_function fcode, fcodef, fcodel, fcode2;
1834
1835 switch (fn)
1836 {
1837 CASE_MATHFN (BUILT_IN_ACOS)
1838 CASE_MATHFN (BUILT_IN_ACOSH)
1839 CASE_MATHFN (BUILT_IN_ASIN)
1840 CASE_MATHFN (BUILT_IN_ASINH)
1841 CASE_MATHFN (BUILT_IN_ATAN)
1842 CASE_MATHFN (BUILT_IN_ATAN2)
1843 CASE_MATHFN (BUILT_IN_ATANH)
1844 CASE_MATHFN (BUILT_IN_CBRT)
1845 CASE_MATHFN (BUILT_IN_CEIL)
1846 CASE_MATHFN (BUILT_IN_CEXPI)
1847 CASE_MATHFN (BUILT_IN_COPYSIGN)
1848 CASE_MATHFN (BUILT_IN_COS)
1849 CASE_MATHFN (BUILT_IN_COSH)
1850 CASE_MATHFN (BUILT_IN_DREM)
1851 CASE_MATHFN (BUILT_IN_ERF)
1852 CASE_MATHFN (BUILT_IN_ERFC)
1853 CASE_MATHFN (BUILT_IN_EXP)
1854 CASE_MATHFN (BUILT_IN_EXP10)
1855 CASE_MATHFN (BUILT_IN_EXP2)
1856 CASE_MATHFN (BUILT_IN_EXPM1)
1857 CASE_MATHFN (BUILT_IN_FABS)
1858 CASE_MATHFN (BUILT_IN_FDIM)
1859 CASE_MATHFN (BUILT_IN_FLOOR)
1860 CASE_MATHFN (BUILT_IN_FMA)
1861 CASE_MATHFN (BUILT_IN_FMAX)
1862 CASE_MATHFN (BUILT_IN_FMIN)
1863 CASE_MATHFN (BUILT_IN_FMOD)
1864 CASE_MATHFN (BUILT_IN_FREXP)
1865 CASE_MATHFN (BUILT_IN_GAMMA)
1866 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1867 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1868 CASE_MATHFN (BUILT_IN_HYPOT)
1869 CASE_MATHFN (BUILT_IN_ILOGB)
1870 CASE_MATHFN (BUILT_IN_ICEIL)
1871 CASE_MATHFN (BUILT_IN_IFLOOR)
1872 CASE_MATHFN (BUILT_IN_INF)
1873 CASE_MATHFN (BUILT_IN_IRINT)
1874 CASE_MATHFN (BUILT_IN_IROUND)
1875 CASE_MATHFN (BUILT_IN_ISINF)
1876 CASE_MATHFN (BUILT_IN_J0)
1877 CASE_MATHFN (BUILT_IN_J1)
1878 CASE_MATHFN (BUILT_IN_JN)
1879 CASE_MATHFN (BUILT_IN_LCEIL)
1880 CASE_MATHFN (BUILT_IN_LDEXP)
1881 CASE_MATHFN (BUILT_IN_LFLOOR)
1882 CASE_MATHFN (BUILT_IN_LGAMMA)
1883 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1884 CASE_MATHFN (BUILT_IN_LLCEIL)
1885 CASE_MATHFN (BUILT_IN_LLFLOOR)
1886 CASE_MATHFN (BUILT_IN_LLRINT)
1887 CASE_MATHFN (BUILT_IN_LLROUND)
1888 CASE_MATHFN (BUILT_IN_LOG)
1889 CASE_MATHFN (BUILT_IN_LOG10)
1890 CASE_MATHFN (BUILT_IN_LOG1P)
1891 CASE_MATHFN (BUILT_IN_LOG2)
1892 CASE_MATHFN (BUILT_IN_LOGB)
1893 CASE_MATHFN (BUILT_IN_LRINT)
1894 CASE_MATHFN (BUILT_IN_LROUND)
1895 CASE_MATHFN (BUILT_IN_MODF)
1896 CASE_MATHFN (BUILT_IN_NAN)
1897 CASE_MATHFN (BUILT_IN_NANS)
1898 CASE_MATHFN (BUILT_IN_NEARBYINT)
1899 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1900 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1901 CASE_MATHFN (BUILT_IN_POW)
1902 CASE_MATHFN (BUILT_IN_POWI)
1903 CASE_MATHFN (BUILT_IN_POW10)
1904 CASE_MATHFN (BUILT_IN_REMAINDER)
1905 CASE_MATHFN (BUILT_IN_REMQUO)
1906 CASE_MATHFN (BUILT_IN_RINT)
1907 CASE_MATHFN (BUILT_IN_ROUND)
1908 CASE_MATHFN (BUILT_IN_SCALB)
1909 CASE_MATHFN (BUILT_IN_SCALBLN)
1910 CASE_MATHFN (BUILT_IN_SCALBN)
1911 CASE_MATHFN (BUILT_IN_SIGNBIT)
1912 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1913 CASE_MATHFN (BUILT_IN_SIN)
1914 CASE_MATHFN (BUILT_IN_SINCOS)
1915 CASE_MATHFN (BUILT_IN_SINH)
1916 CASE_MATHFN (BUILT_IN_SQRT)
1917 CASE_MATHFN (BUILT_IN_TAN)
1918 CASE_MATHFN (BUILT_IN_TANH)
1919 CASE_MATHFN (BUILT_IN_TGAMMA)
1920 CASE_MATHFN (BUILT_IN_TRUNC)
1921 CASE_MATHFN (BUILT_IN_Y0)
1922 CASE_MATHFN (BUILT_IN_Y1)
1923 CASE_MATHFN (BUILT_IN_YN)
1924
1925 default:
1926 return NULL_TREE;
1927 }
1928
1929 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1930 fcode2 = fcode;
1931 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1932 fcode2 = fcodef;
1933 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1934 fcode2 = fcodel;
1935 else
1936 return NULL_TREE;
1937
1938 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1939 return NULL_TREE;
1940
1941 return builtin_decl_explicit (fcode2);
1942 }
1943
1944	/* Like mathfn_built_in_1(), but always use the implicit builtin declaration.  */
1945
1946 tree
1947 mathfn_built_in (tree type, enum built_in_function fn)
1948 {
1949 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1950 }
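
/* A hedged example of the mapping above, not code from this file:
   mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the (implicit)
   declaration of BUILT_IN_SINF, long_double_type_node would yield
   BUILT_IN_SINL, and any other type yields NULL_TREE.  */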
1951
1952 /* If errno must be maintained, expand the RTL to check if the result,
1953 TARGET, of a built-in function call, EXP, is NaN, and if so set
1954 errno to EDOM. */
1955
1956 static void
1957 expand_errno_check (tree exp, rtx target)
1958 {
1959 rtx_code_label *lab = gen_label_rtx ();
1960
1961 /* Test the result; if it is NaN, set errno=EDOM because
1962 the argument was not in the domain. */
1963 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1964 NULL_RTX, NULL, lab,
1965 /* The jump is very likely. */
1966 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1967
1968 #ifdef TARGET_EDOM
1969 /* If this built-in doesn't throw an exception, set errno directly. */
1970 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1971 {
1972 #ifdef GEN_ERRNO_RTX
1973 rtx errno_rtx = GEN_ERRNO_RTX;
1974 #else
1975 rtx errno_rtx
1976 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1977 #endif
1978 emit_move_insn (errno_rtx,
1979 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1980 emit_label (lab);
1981 return;
1982 }
1983 #endif
1984
1985 /* Make sure the library call isn't expanded as a tail call. */
1986 CALL_EXPR_TAILCALL (exp) = 0;
1987
1988 /* We can't set errno=EDOM directly; let the library call do it.
1989 Pop the arguments right away in case the call gets deleted. */
1990 NO_DEFER_POP;
1991 expand_call (exp, target, 0);
1992 OK_DEFER_POP;
1993 emit_label (lab);
1994 }
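
/* The test above relies on a NaN comparing unequal to everything,
   including itself.  In source terms the emitted code is roughly this
   sketch:

     if (result == result)
       goto lab;            ... not NaN, skip setting errno ...
     errno = EDOM;          ... or re-issue the library call ...
    lab:;
*/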
1995
1996 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1997 Return NULL_RTX if a normal call should be emitted rather than expanding
1998 the function in-line. EXP is the expression that is a call to the builtin
1999 function; if convenient, the result should be placed in TARGET.
2000 SUBTARGET may be used as the target for computing one of EXP's operands. */
2001
2002 static rtx
2003 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2004 {
2005 optab builtin_optab;
2006 rtx op0;
2007 rtx_insn *insns;
2008 tree fndecl = get_callee_fndecl (exp);
2009 machine_mode mode;
2010 bool errno_set = false;
2011 bool try_widening = false;
2012 tree arg;
2013
2014 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2015 return NULL_RTX;
2016
2017 arg = CALL_EXPR_ARG (exp, 0);
2018
2019 switch (DECL_FUNCTION_CODE (fndecl))
2020 {
2021 CASE_FLT_FN (BUILT_IN_SQRT):
2022 errno_set = ! tree_expr_nonnegative_p (arg);
2023 try_widening = true;
2024 builtin_optab = sqrt_optab;
2025 break;
2026 CASE_FLT_FN (BUILT_IN_EXP):
2027 errno_set = true; builtin_optab = exp_optab; break;
2028 CASE_FLT_FN (BUILT_IN_EXP10):
2029 CASE_FLT_FN (BUILT_IN_POW10):
2030 errno_set = true; builtin_optab = exp10_optab; break;
2031 CASE_FLT_FN (BUILT_IN_EXP2):
2032 errno_set = true; builtin_optab = exp2_optab; break;
2033 CASE_FLT_FN (BUILT_IN_EXPM1):
2034 errno_set = true; builtin_optab = expm1_optab; break;
2035 CASE_FLT_FN (BUILT_IN_LOGB):
2036 errno_set = true; builtin_optab = logb_optab; break;
2037 CASE_FLT_FN (BUILT_IN_LOG):
2038 errno_set = true; builtin_optab = log_optab; break;
2039 CASE_FLT_FN (BUILT_IN_LOG10):
2040 errno_set = true; builtin_optab = log10_optab; break;
2041 CASE_FLT_FN (BUILT_IN_LOG2):
2042 errno_set = true; builtin_optab = log2_optab; break;
2043 CASE_FLT_FN (BUILT_IN_LOG1P):
2044 errno_set = true; builtin_optab = log1p_optab; break;
2045 CASE_FLT_FN (BUILT_IN_ASIN):
2046 builtin_optab = asin_optab; break;
2047 CASE_FLT_FN (BUILT_IN_ACOS):
2048 builtin_optab = acos_optab; break;
2049 CASE_FLT_FN (BUILT_IN_TAN):
2050 builtin_optab = tan_optab; break;
2051 CASE_FLT_FN (BUILT_IN_ATAN):
2052 builtin_optab = atan_optab; break;
2053 CASE_FLT_FN (BUILT_IN_FLOOR):
2054 builtin_optab = floor_optab; break;
2055 CASE_FLT_FN (BUILT_IN_CEIL):
2056 builtin_optab = ceil_optab; break;
2057 CASE_FLT_FN (BUILT_IN_TRUNC):
2058 builtin_optab = btrunc_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ROUND):
2060 builtin_optab = round_optab; break;
2061 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2062 builtin_optab = nearbyint_optab;
2063 if (flag_trapping_math)
2064 break;
2065 /* Else fallthrough and expand as rint. */
2066 CASE_FLT_FN (BUILT_IN_RINT):
2067 builtin_optab = rint_optab; break;
2068 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2069 builtin_optab = significand_optab; break;
2070 default:
2071 gcc_unreachable ();
2072 }
2073
2074 /* Make a suitable register to place result in. */
2075 mode = TYPE_MODE (TREE_TYPE (exp));
2076
2077 if (! flag_errno_math || ! HONOR_NANS (mode))
2078 errno_set = false;
2079
2080 /* Before working hard, check whether the instruction is available, but try
2081 to widen the mode for specific operations. */
2082 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2083 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2084 && (!errno_set || !optimize_insn_for_size_p ()))
2085 {
2086 rtx result = gen_reg_rtx (mode);
2087
2088 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2089 need to expand the argument again. This way, we will not perform
2090	     side-effects more than once.  */
2091 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2092
2093 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2094
2095 start_sequence ();
2096
2097 /* Compute into RESULT.
2098 Set RESULT to wherever the result comes back. */
2099 result = expand_unop (mode, builtin_optab, op0, result, 0);
2100
2101 if (result != 0)
2102 {
2103 if (errno_set)
2104 expand_errno_check (exp, result);
2105
2106 /* Output the entire sequence. */
2107 insns = get_insns ();
2108 end_sequence ();
2109 emit_insn (insns);
2110 return result;
2111 }
2112
2113 /* If we were unable to expand via the builtin, stop the sequence
2114 (without outputting the insns) and call to the library function
2115 with the stabilized argument list. */
2116 end_sequence ();
2117 }
2118
2119 return expand_call (exp, target, target == const0_rtx);
2120 }
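
/* A hedged sketch of the two outcomes above: when a sqrt insn exists,

     y = sqrt (x);

   expands inline through sqrt_optab, plus the errno check when X may be
   negative and errno math is honored; otherwise the partial sequence is
   discarded and a normal library call is emitted instead.  */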
2121
2122 /* Expand a call to the builtin binary math functions (pow and atan2).
2123 Return NULL_RTX if a normal call should be emitted rather than expanding the
2124 function in-line. EXP is the expression that is a call to the builtin
2125 function; if convenient, the result should be placed in TARGET.
2126 SUBTARGET may be used as the target for computing one of EXP's
2127 operands. */
2128
2129 static rtx
2130 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2131 {
2132 optab builtin_optab;
2133 rtx op0, op1, result;
2134 rtx_insn *insns;
2135 int op1_type = REAL_TYPE;
2136 tree fndecl = get_callee_fndecl (exp);
2137 tree arg0, arg1;
2138 machine_mode mode;
2139 bool errno_set = true;
2140
2141 switch (DECL_FUNCTION_CODE (fndecl))
2142 {
2143 CASE_FLT_FN (BUILT_IN_SCALBN):
2144 CASE_FLT_FN (BUILT_IN_SCALBLN):
2145 CASE_FLT_FN (BUILT_IN_LDEXP):
2146	      op1_type = INTEGER_TYPE;  /* FALLTHRU */
2147 default:
2148 break;
2149 }
2150
2151 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2152 return NULL_RTX;
2153
2154 arg0 = CALL_EXPR_ARG (exp, 0);
2155 arg1 = CALL_EXPR_ARG (exp, 1);
2156
2157 switch (DECL_FUNCTION_CODE (fndecl))
2158 {
2159 CASE_FLT_FN (BUILT_IN_POW):
2160 builtin_optab = pow_optab; break;
2161 CASE_FLT_FN (BUILT_IN_ATAN2):
2162 builtin_optab = atan2_optab; break;
2163 CASE_FLT_FN (BUILT_IN_SCALB):
2164 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2165 return 0;
2166 builtin_optab = scalb_optab; break;
2167 CASE_FLT_FN (BUILT_IN_SCALBN):
2168 CASE_FLT_FN (BUILT_IN_SCALBLN):
2169 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2170 return 0;
2171 /* Fall through... */
2172 CASE_FLT_FN (BUILT_IN_LDEXP):
2173 builtin_optab = ldexp_optab; break;
2174 CASE_FLT_FN (BUILT_IN_FMOD):
2175 builtin_optab = fmod_optab; break;
2176 CASE_FLT_FN (BUILT_IN_REMAINDER):
2177 CASE_FLT_FN (BUILT_IN_DREM):
2178 builtin_optab = remainder_optab; break;
2179 default:
2180 gcc_unreachable ();
2181 }
2182
2183 /* Make a suitable register to place result in. */
2184 mode = TYPE_MODE (TREE_TYPE (exp));
2185
2186 /* Before working hard, check whether the instruction is available. */
2187 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2188 return NULL_RTX;
2189
2190 result = gen_reg_rtx (mode);
2191
2192 if (! flag_errno_math || ! HONOR_NANS (mode))
2193 errno_set = false;
2194
2195 if (errno_set && optimize_insn_for_size_p ())
2196 return 0;
2197
2198 /* Always stabilize the argument list. */
2199 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2200 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2201
2202 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2203 op1 = expand_normal (arg1);
2204
2205 start_sequence ();
2206
2207 /* Compute into RESULT.
2208 Set RESULT to wherever the result comes back. */
2209 result = expand_binop (mode, builtin_optab, op0, op1,
2210 result, 0, OPTAB_DIRECT);
2211
2212 /* If we were unable to expand via the builtin, stop the sequence
2213 (without outputting the insns) and call to the library function
2214 with the stabilized argument list. */
2215 if (result == 0)
2216 {
2217 end_sequence ();
2218 return expand_call (exp, target, target == const0_rtx);
2219 }
2220
2221 if (errno_set)
2222 expand_errno_check (exp, result);
2223
2224 /* Output the entire sequence. */
2225 insns = get_insns ();
2226 end_sequence ();
2227 emit_insn (insns);
2228
2229 return result;
2230 }
2231
2232 /* Expand a call to the builtin trinary math functions (fma).
2233 Return NULL_RTX if a normal call should be emitted rather than expanding the
2234 function in-line. EXP is the expression that is a call to the builtin
2235 function; if convenient, the result should be placed in TARGET.
2236 SUBTARGET may be used as the target for computing one of EXP's
2237 operands. */
2238
2239 static rtx
2240 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2241 {
2242 optab builtin_optab;
2243 rtx op0, op1, op2, result;
2244 rtx_insn *insns;
2245 tree fndecl = get_callee_fndecl (exp);
2246 tree arg0, arg1, arg2;
2247 machine_mode mode;
2248
2249 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2250 return NULL_RTX;
2251
2252 arg0 = CALL_EXPR_ARG (exp, 0);
2253 arg1 = CALL_EXPR_ARG (exp, 1);
2254 arg2 = CALL_EXPR_ARG (exp, 2);
2255
2256 switch (DECL_FUNCTION_CODE (fndecl))
2257 {
2258 CASE_FLT_FN (BUILT_IN_FMA):
2259 builtin_optab = fma_optab; break;
2260 default:
2261 gcc_unreachable ();
2262 }
2263
2264 /* Make a suitable register to place result in. */
2265 mode = TYPE_MODE (TREE_TYPE (exp));
2266
2267 /* Before working hard, check whether the instruction is available. */
2268 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2269 return NULL_RTX;
2270
2271 result = gen_reg_rtx (mode);
2272
2273 /* Always stabilize the argument list. */
2274 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2275 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2276 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2277
2278 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2279 op1 = expand_normal (arg1);
2280 op2 = expand_normal (arg2);
2281
2282 start_sequence ();
2283
2284 /* Compute into RESULT.
2285 Set RESULT to wherever the result comes back. */
2286 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2287 result, 0);
2288
2289 /* If we were unable to expand via the builtin, stop the sequence
2290 (without outputting the insns) and call to the library function
2291 with the stabilized argument list. */
2292 if (result == 0)
2293 {
2294 end_sequence ();
2295 return expand_call (exp, target, target == const0_rtx);
2296 }
2297
2298 /* Output the entire sequence. */
2299 insns = get_insns ();
2300 end_sequence ();
2301 emit_insn (insns);
2302
2303 return result;
2304 }
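
/* As a reminder of the semantics expanded here: fma (a, b, c) computes
   a * b + c with a single rounding, so when fma_optab matches,

     double r = __builtin_fma (x, y, z);

   becomes one fused multiply-add instead of a separately rounded
   multiply and add.  */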
2305
2306 /* Expand a call to the builtin sin and cos math functions.
2307 Return NULL_RTX if a normal call should be emitted rather than expanding the
2308 function in-line. EXP is the expression that is a call to the builtin
2309 function; if convenient, the result should be placed in TARGET.
2310 SUBTARGET may be used as the target for computing one of EXP's
2311 operands. */
2312
2313 static rtx
2314 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2315 {
2316 optab builtin_optab;
2317 rtx op0;
2318 rtx_insn *insns;
2319 tree fndecl = get_callee_fndecl (exp);
2320 machine_mode mode;
2321 tree arg;
2322
2323 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2325
2326 arg = CALL_EXPR_ARG (exp, 0);
2327
2328 switch (DECL_FUNCTION_CODE (fndecl))
2329 {
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 CASE_FLT_FN (BUILT_IN_COS):
2332 builtin_optab = sincos_optab; break;
2333 default:
2334 gcc_unreachable ();
2335 }
2336
2337 /* Make a suitable register to place result in. */
2338 mode = TYPE_MODE (TREE_TYPE (exp));
2339
2340	  /* Check if the sincos insn is available; otherwise fall back
2341	     to the sin or cos insn.  */
2342 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2343 switch (DECL_FUNCTION_CODE (fndecl))
2344 {
2345 CASE_FLT_FN (BUILT_IN_SIN):
2346 builtin_optab = sin_optab; break;
2347 CASE_FLT_FN (BUILT_IN_COS):
2348 builtin_optab = cos_optab; break;
2349 default:
2350 gcc_unreachable ();
2351 }
2352
2353 /* Before working hard, check whether the instruction is available. */
2354 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2355 {
2356 rtx result = gen_reg_rtx (mode);
2357
2358 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2359 need to expand the argument again. This way, we will not perform
2360	 side-effects more than once.  */
2361 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2362
2363 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2364
2365 start_sequence ();
2366
2367 /* Compute into RESULT.
2368 Set RESULT to wherever the result comes back. */
2369 if (builtin_optab == sincos_optab)
2370 {
2371 int ok;
2372
2373 switch (DECL_FUNCTION_CODE (fndecl))
2374 {
2375 CASE_FLT_FN (BUILT_IN_SIN):
2376 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2377 break;
2378 CASE_FLT_FN (BUILT_IN_COS):
2379 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2380 break;
2381 default:
2382 gcc_unreachable ();
2383 }
2384 gcc_assert (ok);
2385 }
2386 else
2387 result = expand_unop (mode, builtin_optab, op0, result, 0);
2388
2389 if (result != 0)
2390 {
2391 /* Output the entire sequence. */
2392 insns = get_insns ();
2393 end_sequence ();
2394 emit_insn (insns);
2395 return result;
2396 }
2397
2398 /* If we were unable to expand via the builtin, stop the sequence
2399 (without outputting the insns) and call to the library function
2400 with the stabilized argument list. */
2401 end_sequence ();
2402 }
2403
2404 return expand_call (exp, target, target == const0_rtx);
2405 }
2406
2407	/* Given an interclass math builtin decl FNDECL and its argument ARG,
2408	   return an RTL instruction code that implements the functionality.
2409	   If that isn't possible or available, return CODE_FOR_nothing.  */
2410
2411 static enum insn_code
2412 interclass_mathfn_icode (tree arg, tree fndecl)
2413 {
2414 bool errno_set = false;
2415 optab builtin_optab = unknown_optab;
2416 machine_mode mode;
2417
2418 switch (DECL_FUNCTION_CODE (fndecl))
2419 {
2420 CASE_FLT_FN (BUILT_IN_ILOGB):
2421 errno_set = true; builtin_optab = ilogb_optab; break;
2422 CASE_FLT_FN (BUILT_IN_ISINF):
2423 builtin_optab = isinf_optab; break;
2424 case BUILT_IN_ISNORMAL:
2425 case BUILT_IN_ISFINITE:
2426 CASE_FLT_FN (BUILT_IN_FINITE):
2427 case BUILT_IN_FINITED32:
2428 case BUILT_IN_FINITED64:
2429 case BUILT_IN_FINITED128:
2430 case BUILT_IN_ISINFD32:
2431 case BUILT_IN_ISINFD64:
2432 case BUILT_IN_ISINFD128:
2433 /* These builtins have no optabs (yet). */
2434 break;
2435 default:
2436 gcc_unreachable ();
2437 }
2438
2439 /* There's no easy way to detect the case we need to set EDOM. */
2440 if (flag_errno_math && errno_set)
2441 return CODE_FOR_nothing;
2442
2443 /* Optab mode depends on the mode of the input argument. */
2444 mode = TYPE_MODE (TREE_TYPE (arg));
2445
2446 if (builtin_optab)
2447 return optab_handler (builtin_optab, mode);
2448 return CODE_FOR_nothing;
2449 }
2450
2451 /* Expand a call to one of the builtin math functions that operate on
2452 floating point argument and output an integer result (ilogb, isinf,
2453 isnan, etc).
2454 Return 0 if a normal call should be emitted rather than expanding the
2455 function in-line. EXP is the expression that is a call to the builtin
2456 function; if convenient, the result should be placed in TARGET. */
2457
2458 static rtx
2459 expand_builtin_interclass_mathfn (tree exp, rtx target)
2460 {
2461 enum insn_code icode = CODE_FOR_nothing;
2462 rtx op0;
2463 tree fndecl = get_callee_fndecl (exp);
2464 machine_mode mode;
2465 tree arg;
2466
2467 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2468 return NULL_RTX;
2469
2470 arg = CALL_EXPR_ARG (exp, 0);
2471 icode = interclass_mathfn_icode (arg, fndecl);
2472 mode = TYPE_MODE (TREE_TYPE (arg));
2473
2474 if (icode != CODE_FOR_nothing)
2475 {
2476 struct expand_operand ops[1];
2477 rtx_insn *last = get_last_insn ();
2478 tree orig_arg = arg;
2479
2480 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2481 need to expand the argument again. This way, we will not perform
2482	 side-effects more than once.  */
2483 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2484
2485 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2486
2487 if (mode != GET_MODE (op0))
2488 op0 = convert_to_mode (mode, op0, 0);
2489
2490 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2491 if (maybe_legitimize_operands (icode, 0, 1, ops)
2492 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2493 return ops[0].value;
2494
2495 delete_insns_since (last);
2496 CALL_EXPR_ARG (exp, 0) = orig_arg;
2497 }
2498
2499 return NULL_RTX;
2500 }
2501
2502 /* Expand a call to the builtin sincos math function.
2503 Return NULL_RTX if a normal call should be emitted rather than expanding the
2504 function in-line. EXP is the expression that is a call to the builtin
2505 function. */
2506
2507 static rtx
2508 expand_builtin_sincos (tree exp)
2509 {
2510 rtx op0, op1, op2, target1, target2;
2511 machine_mode mode;
2512 tree arg, sinp, cosp;
2513 int result;
2514 location_t loc = EXPR_LOCATION (exp);
2515 tree alias_type, alias_off;
2516
2517 if (!validate_arglist (exp, REAL_TYPE,
2518 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2519 return NULL_RTX;
2520
2521 arg = CALL_EXPR_ARG (exp, 0);
2522 sinp = CALL_EXPR_ARG (exp, 1);
2523 cosp = CALL_EXPR_ARG (exp, 2);
2524
2525 /* Make a suitable register to place result in. */
2526 mode = TYPE_MODE (TREE_TYPE (arg));
2527
2528 /* Check if sincos insn is available, otherwise emit the call. */
2529 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2530 return NULL_RTX;
2531
2532 target1 = gen_reg_rtx (mode);
2533 target2 = gen_reg_rtx (mode);
2534
2535 op0 = expand_normal (arg);
2536 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2537 alias_off = build_int_cst (alias_type, 0);
2538 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2539 sinp, alias_off));
2540 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2541 cosp, alias_off));
2542
2543 /* Compute into target1 and target2.
2544 Set TARGET to wherever the result comes back. */
2545 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2546 gcc_assert (result);
2547
2548 /* Move target1 and target2 to the memory locations indicated
2549 by op1 and op2. */
2550 emit_move_insn (op1, target1);
2551 emit_move_insn (op2, target2);
2552
2553 return const0_rtx;
2554 }
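
/* A hedged source-level sketch of the expansion above, assuming the
   target provides a sincos insn:

     double s, c;
     sincos (x, &s, &c);

   computes both results with one twoval operation and then stores them
   through the SINP and COSP pointers.  */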
2555
2556 /* Expand a call to the internal cexpi builtin to the sincos math function.
2557 EXP is the expression that is a call to the builtin function; if convenient,
2558 the result should be placed in TARGET. */
2559
2560 static rtx
2561 expand_builtin_cexpi (tree exp, rtx target)
2562 {
2563 tree fndecl = get_callee_fndecl (exp);
2564 tree arg, type;
2565 machine_mode mode;
2566 rtx op0, op1, op2;
2567 location_t loc = EXPR_LOCATION (exp);
2568
2569 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2570 return NULL_RTX;
2571
2572 arg = CALL_EXPR_ARG (exp, 0);
2573 type = TREE_TYPE (arg);
2574 mode = TYPE_MODE (TREE_TYPE (arg));
2575
2576	  /* Try expanding via a sincos optab; fall back to emitting a libcall
2577	     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2578	     is only generated from sincos, from cexp, or when one of them is available.  */
2579 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2580 {
2581 op1 = gen_reg_rtx (mode);
2582 op2 = gen_reg_rtx (mode);
2583
2584 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2585
2586 /* Compute into op1 and op2. */
2587 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2588 }
2589 else if (targetm.libc_has_function (function_sincos))
2590 {
2591 tree call, fn = NULL_TREE;
2592 tree top1, top2;
2593 rtx op1a, op2a;
2594
2595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2596 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2598 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2599 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2600 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2601 else
2602 gcc_unreachable ();
2603
2604 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2605 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2606 op1a = copy_addr_to_reg (XEXP (op1, 0));
2607 op2a = copy_addr_to_reg (XEXP (op2, 0));
2608 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2609 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2610
2611 /* Make sure not to fold the sincos call again. */
2612 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2613 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2614 call, 3, arg, top1, top2));
2615 }
2616 else
2617 {
2618 tree call, fn = NULL_TREE, narg;
2619 tree ctype = build_complex_type (type);
2620
2621 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2622 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2624 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2626 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2627 else
2628 gcc_unreachable ();
2629
2630 /* If we don't have a decl for cexp create one. This is the
2631 friendliest fallback if the user calls __builtin_cexpi
2632 without full target C99 function support. */
2633 if (fn == NULL_TREE)
2634 {
2635 tree fntype;
2636 const char *name = NULL;
2637
2638 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2639 name = "cexpf";
2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2641 name = "cexp";
2642 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2643 name = "cexpl";
2644
2645 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2646 fn = build_fn_decl (name, fntype);
2647 }
2648
2649 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2650 build_real (type, dconst0), arg);
2651
2652 /* Make sure not to fold the cexp call again. */
2653 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2654 return expand_expr (build_call_nary (ctype, call, 1, narg),
2655 target, VOIDmode, EXPAND_NORMAL);
2656 }
2657
2658 /* Now build the proper return type. */
2659 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2660 make_tree (TREE_TYPE (arg), op2),
2661 make_tree (TREE_TYPE (arg), op1)),
2662 target, VOIDmode, EXPAND_NORMAL);
2663 }
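
/* The identity behind all three paths above: cexpi (x) == cos (x)
   + i * sin (x), i.e. cexp (I * x) for a real X.  Hence the fallbacks:
   a sincos optab, a sincos libcall writing into two temporaries, or a
   cexp libcall applied to the complex value (0, x).  */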
2664
2665 /* Conveniently construct a function call expression. FNDECL names the
2666 function to be called, N is the number of arguments, and the "..."
2667	   parameters are the argument expressions.  Unlike build_call_expr
2668	   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2669
2670 static tree
2671 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2672 {
2673 va_list ap;
2674 tree fntype = TREE_TYPE (fndecl);
2675 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2676
2677 va_start (ap, n);
2678 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2679 va_end (ap);
2680 SET_EXPR_LOCATION (fn, loc);
2681 return fn;
2682 }
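
/* A hedged usage sketch: the fallback paths below use this helper as in

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields a plain CALL_EXPR calling FALLBACK_FNDECL with the single
   argument ARG and location LOC, guaranteed not to be folded away.  */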
2683
2684 /* Expand a call to one of the builtin rounding functions gcc defines
2685 as an extension (lfloor and lceil). As these are gcc extensions we
2686 do not need to worry about setting errno to EDOM.
2687 If expanding via optab fails, lower expression to (int)(floor(x)).
2688 EXP is the expression that is a call to the builtin function;
2689 if convenient, the result should be placed in TARGET. */
2690
2691 static rtx
2692 expand_builtin_int_roundingfn (tree exp, rtx target)
2693 {
2694 convert_optab builtin_optab;
2695 rtx op0, tmp;
2696 rtx_insn *insns;
2697 tree fndecl = get_callee_fndecl (exp);
2698 enum built_in_function fallback_fn;
2699 tree fallback_fndecl;
2700 machine_mode mode;
2701 tree arg;
2702
2703 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2704 gcc_unreachable ();
2705
2706 arg = CALL_EXPR_ARG (exp, 0);
2707
2708 switch (DECL_FUNCTION_CODE (fndecl))
2709 {
2710 CASE_FLT_FN (BUILT_IN_ICEIL):
2711 CASE_FLT_FN (BUILT_IN_LCEIL):
2712 CASE_FLT_FN (BUILT_IN_LLCEIL):
2713 builtin_optab = lceil_optab;
2714 fallback_fn = BUILT_IN_CEIL;
2715 break;
2716
2717 CASE_FLT_FN (BUILT_IN_IFLOOR):
2718 CASE_FLT_FN (BUILT_IN_LFLOOR):
2719 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2720 builtin_optab = lfloor_optab;
2721 fallback_fn = BUILT_IN_FLOOR;
2722 break;
2723
2724 default:
2725 gcc_unreachable ();
2726 }
2727
2728 /* Make a suitable register to place result in. */
2729 mode = TYPE_MODE (TREE_TYPE (exp));
2730
2731 target = gen_reg_rtx (mode);
2732
2733 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2734 need to expand the argument again. This way, we will not perform
2735	     side-effects more than once.  */
2736 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2737
2738 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2739
2740 start_sequence ();
2741
2742 /* Compute into TARGET. */
2743 if (expand_sfix_optab (target, op0, builtin_optab))
2744 {
2745 /* Output the entire sequence. */
2746 insns = get_insns ();
2747 end_sequence ();
2748 emit_insn (insns);
2749 return target;
2750 }
2751
2752 /* If we were unable to expand via the builtin, stop the sequence
2753 (without outputting the insns). */
2754 end_sequence ();
2755
2756 /* Fall back to floating point rounding optab. */
2757 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2758
2759 /* For non-C99 targets we may end up without a fallback fndecl here
2760 if the user called __builtin_lfloor directly. In this case emit
2761	     a call to the floor/ceil variants nevertheless.  This should result
2762	     in the best user experience for targets lacking full C99 support.  */
2763 if (fallback_fndecl == NULL_TREE)
2764 {
2765 tree fntype;
2766 const char *name = NULL;
2767
2768 switch (DECL_FUNCTION_CODE (fndecl))
2769 {
2770 case BUILT_IN_ICEIL:
2771 case BUILT_IN_LCEIL:
2772 case BUILT_IN_LLCEIL:
2773 name = "ceil";
2774 break;
2775 case BUILT_IN_ICEILF:
2776 case BUILT_IN_LCEILF:
2777 case BUILT_IN_LLCEILF:
2778 name = "ceilf";
2779 break;
2780 case BUILT_IN_ICEILL:
2781 case BUILT_IN_LCEILL:
2782 case BUILT_IN_LLCEILL:
2783 name = "ceill";
2784 break;
2785 case BUILT_IN_IFLOOR:
2786 case BUILT_IN_LFLOOR:
2787 case BUILT_IN_LLFLOOR:
2788 name = "floor";
2789 break;
2790 case BUILT_IN_IFLOORF:
2791 case BUILT_IN_LFLOORF:
2792 case BUILT_IN_LLFLOORF:
2793 name = "floorf";
2794 break;
2795 case BUILT_IN_IFLOORL:
2796 case BUILT_IN_LFLOORL:
2797 case BUILT_IN_LLFLOORL:
2798 name = "floorl";
2799 break;
2800 default:
2801 gcc_unreachable ();
2802 }
2803
2804 fntype = build_function_type_list (TREE_TYPE (arg),
2805 TREE_TYPE (arg), NULL_TREE);
2806 fallback_fndecl = build_fn_decl (name, fntype);
2807 }
2808
2809 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2810
2811 tmp = expand_normal (exp);
2812 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2813
2814 /* Truncate the result of floating point optab to integer
2815 via expand_fix (). */
2816 target = gen_reg_rtx (mode);
2817 expand_fix (target, tmp, 0);
2818
2819 return target;
2820 }
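
/* In source terms, the fallback path above amounts to this sketch
   (assuming __builtin_lfloor and no lfloor insn):

     long l = __builtin_lfloor (x);   ==>   long l = (long) floor (x);

   where the final conversion is performed by expand_fix.  */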
2821
2822 /* Expand a call to one of the builtin math functions doing integer
2823 conversion (lrint).
2824 Return 0 if a normal call should be emitted rather than expanding the
2825 function in-line. EXP is the expression that is a call to the builtin
2826 function; if convenient, the result should be placed in TARGET. */
2827
2828 static rtx
2829 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2830 {
2831 convert_optab builtin_optab;
2832 rtx op0;
2833 rtx_insn *insns;
2834 tree fndecl = get_callee_fndecl (exp);
2835 tree arg;
2836 machine_mode mode;
2837 enum built_in_function fallback_fn = BUILT_IN_NONE;
2838
2839 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2840 gcc_unreachable ();
2841
2842 arg = CALL_EXPR_ARG (exp, 0);
2843
2844 switch (DECL_FUNCTION_CODE (fndecl))
2845 {
2846 CASE_FLT_FN (BUILT_IN_IRINT):
2847 fallback_fn = BUILT_IN_LRINT;
2848 /* FALLTHRU */
2849 CASE_FLT_FN (BUILT_IN_LRINT):
2850 CASE_FLT_FN (BUILT_IN_LLRINT):
2851 builtin_optab = lrint_optab;
2852 break;
2853
2854 CASE_FLT_FN (BUILT_IN_IROUND):
2855 fallback_fn = BUILT_IN_LROUND;
2856 /* FALLTHRU */
2857 CASE_FLT_FN (BUILT_IN_LROUND):
2858 CASE_FLT_FN (BUILT_IN_LLROUND):
2859 builtin_optab = lround_optab;
2860 break;
2861
2862 default:
2863 gcc_unreachable ();
2864 }
2865
2866 /* There's no easy way to detect the case we need to set EDOM. */
2867 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2868 return NULL_RTX;
2869
2870 /* Make a suitable register to place result in. */
2871 mode = TYPE_MODE (TREE_TYPE (exp));
2872
2873	  /* If errno need not be maintained, we can expand inline via the optab.  */
2874 if (!flag_errno_math)
2875 {
2876 rtx result = gen_reg_rtx (mode);
2877
2878 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2879 need to expand the argument again. This way, we will not perform
2880	 side-effects more than once.  */
2881 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2882
2883 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2884
2885 start_sequence ();
2886
2887 if (expand_sfix_optab (result, op0, builtin_optab))
2888 {
2889 /* Output the entire sequence. */
2890 insns = get_insns ();
2891 end_sequence ();
2892 emit_insn (insns);
2893 return result;
2894 }
2895
2896 /* If we were unable to expand via the builtin, stop the sequence
2897 (without outputting the insns) and call to the library function
2898 with the stabilized argument list. */
2899 end_sequence ();
2900 }
2901
2902 if (fallback_fn != BUILT_IN_NONE)
2903 {
2904 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2905 targets, (int) round (x) should never be transformed into
2906 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2907 a call to lround in the hope that the target provides at least some
2908	 C99 functions.  This should result in the best user experience for
2909	 targets lacking full C99 support.  */
2910 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2911 fallback_fn, 0);
2912
2913 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2914 fallback_fndecl, 1, arg);
2915
2916 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2917 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2918 return convert_to_mode (mode, target, 0);
2919 }
2920
2921 return expand_call (exp, target, target == const0_rtx);
2922 }
2923
2924 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2925 a normal call should be emitted rather than expanding the function
2926 in-line. EXP is the expression that is a call to the builtin
2927 function; if convenient, the result should be placed in TARGET. */
2928
2929 static rtx
2930 expand_builtin_powi (tree exp, rtx target)
2931 {
2932 tree arg0, arg1;
2933 rtx op0, op1;
2934 machine_mode mode;
2935 machine_mode mode2;
2936
2937 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2938 return NULL_RTX;
2939
2940 arg0 = CALL_EXPR_ARG (exp, 0);
2941 arg1 = CALL_EXPR_ARG (exp, 1);
2942 mode = TYPE_MODE (TREE_TYPE (exp));
2943
2944 /* Emit a libcall to libgcc. */
2945
2946 /* Mode of the 2nd argument must match that of an int. */
2947 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2948
2949 if (target == NULL_RTX)
2950 target = gen_reg_rtx (mode);
2951
2952 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2953 if (GET_MODE (op0) != mode)
2954 op0 = convert_to_mode (mode, op0, 0);
2955 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2956 if (GET_MODE (op1) != mode2)
2957 op1 = convert_to_mode (mode2, op1, 0);
2958
2959 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2960 target, LCT_CONST, mode, 2,
2961 op0, mode, op1, mode2);
2962
2963 return target;
2964 }
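
/* A hedged note on the libcall above: for DFmode the powi libfunc is
   normally libgcc's __powidf2, so

     double r = __builtin_powi (x, 3);

   expands to roughly r = __powidf2 (x, 3); the exact libfunc name is
   mode and target dependent.  */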
2965
2966	/* Expand expression EXP which is a call to the strlen builtin.  Return
2967	   NULL_RTX if we failed; the caller should then emit a normal call.  Otherwise
2968	   try to get the result in TARGET, if convenient.  */
2969
2970 static rtx
2971 expand_builtin_strlen (tree exp, rtx target,
2972 machine_mode target_mode)
2973 {
2974 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2975 return NULL_RTX;
2976 else
2977 {
2978 struct expand_operand ops[4];
2979 rtx pat;
2980 tree len;
2981 tree src = CALL_EXPR_ARG (exp, 0);
2982 rtx src_reg;
2983 rtx_insn *before_strlen;
2984 machine_mode insn_mode = target_mode;
2985 enum insn_code icode = CODE_FOR_nothing;
2986 unsigned int align;
2987
2988 /* If the length can be computed at compile-time, return it. */
2989 len = c_strlen (src, 0);
2990 if (len)
2991 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2992
2993	      /* If the length can be computed at compile-time and is a constant
2994	         integer, but there are side-effects in src, evaluate
2995 src for side-effects, then return len.
2996 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2997 can be optimized into: i++; x = 3; */
2998 len = c_strlen (src, 1);
2999 if (len && TREE_CODE (len) == INTEGER_CST)
3000 {
3001 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3002 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3003 }
3004
3005 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3006
3007 /* If SRC is not a pointer type, don't do this operation inline. */
3008 if (align == 0)
3009 return NULL_RTX;
3010
3011 /* Bail out if we can't compute strlen in the right mode. */
3012 while (insn_mode != VOIDmode)
3013 {
3014 icode = optab_handler (strlen_optab, insn_mode);
3015 if (icode != CODE_FOR_nothing)
3016 break;
3017
3018 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3019 }
3020 if (insn_mode == VOIDmode)
3021 return NULL_RTX;
3022
3023 /* Make a place to hold the source address. We will not expand
3024 the actual source until we are sure that the expansion will
3025 not fail -- there are trees that cannot be expanded twice. */
3026 src_reg = gen_reg_rtx (Pmode);
3027
3028 /* Mark the beginning of the strlen sequence so we can emit the
3029 source operand later. */
3030 before_strlen = get_last_insn ();
3031
3032 create_output_operand (&ops[0], target, insn_mode);
3033 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3034 create_integer_operand (&ops[2], 0);
3035 create_integer_operand (&ops[3], align);
3036 if (!maybe_expand_insn (icode, 4, ops))
3037 return NULL_RTX;
3038
3039 /* Now that we are assured of success, expand the source. */
3040 start_sequence ();
3041 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3042 if (pat != src_reg)
3043 {
3044 #ifdef POINTERS_EXTEND_UNSIGNED
3045 if (GET_MODE (pat) != Pmode)
3046 pat = convert_to_mode (Pmode, pat,
3047 POINTERS_EXTEND_UNSIGNED);
3048 #endif
3049 emit_move_insn (src_reg, pat);
3050 }
3051 pat = get_insns ();
3052 end_sequence ();
3053
3054 if (before_strlen)
3055 emit_insn_after (pat, before_strlen);
3056 else
3057 emit_insn_before (pat, get_insns ());
3058
3059 /* Return the value in the proper mode for this function. */
3060 if (GET_MODE (ops[0].value) == target_mode)
3061 target = ops[0].value;
3062 else if (target != 0)
3063 convert_move (target, ops[0].value, 0);
3064 else
3065 target = convert_to_mode (target_mode, ops[0].value, 0);
3066
3067 return target;
3068 }
3069 }
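
/* Two illustrative outcomes of the expansion above, as hedged sketches:

     n = strlen ("hello");   folds to n = 5 via c_strlen, while
     n = strlen (p);         uses the target's strlen insn when
                             strlen_optab matches, else a libcall.
*/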
3070
3071	/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3072	   bytes from constant string DATA + OFFSET and return it as a target
3073	   constant.  */
3074
3075 static rtx
3076 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3077 machine_mode mode)
3078 {
3079 const char *str = (const char *) data;
3080
3081 gcc_assert (offset >= 0
3082 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3083 <= strlen (str) + 1));
3084
3085 return c_readstr (str + offset, mode);
3086 }
3087
3088	/* LEN specifies the length of the block for the memcpy/memset operation.
3089	   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3090	   In some cases we can make a very likely guess about the maximum size,
3091	   which we then store into PROBABLE_MAX_SIZE.  */
3092
3093 static void
3094 determine_block_size (tree len, rtx len_rtx,
3095 unsigned HOST_WIDE_INT *min_size,
3096 unsigned HOST_WIDE_INT *max_size,
3097 unsigned HOST_WIDE_INT *probable_max_size)
3098 {
3099 if (CONST_INT_P (len_rtx))
3100 {
3101 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3102 return;
3103 }
3104 else
3105 {
3106 wide_int min, max;
3107 enum value_range_type range_type = VR_UNDEFINED;
3108
3109 /* Determine bounds from the type. */
3110 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3111 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3112 else
3113 *min_size = 0;
3114 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3115 *probable_max_size = *max_size
3116 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3117 else
3118 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3119
3120 if (TREE_CODE (len) == SSA_NAME)
3121 range_type = get_range_info (len, &min, &max);
3122 if (range_type == VR_RANGE)
3123 {
3124 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3125 *min_size = min.to_uhwi ();
3126 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3127 *probable_max_size = *max_size = max.to_uhwi ();
3128 }
3129 else if (range_type == VR_ANTI_RANGE)
3130 {
3131	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3132 if (min == 0)
3133 {
3134 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3135 *min_size = max.to_uhwi () + 1;
3136 }
3137	  /* Code like

3139	     int n;
3140	     if (n < 100)
3141	       memcpy (a, b, n)

3143	     produces an anti-range allowing negative values of N.  We can
3144	     still use that information to guess that N is not negative.
3145	     */
3146 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3147 *probable_max_size = min.to_uhwi () - 1;
3148 }
3149 }
3150 gcc_checking_assert (*max_size <=
3151 (unsigned HOST_WIDE_INT)
3152 GET_MODE_MASK (GET_MODE (len_rtx)));
3153 }
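
/* A worked instance of the second anti-range case above: with

     int n;
     if (n < 100)
       memcpy (a, b, n);

   the range info for N is the anti-range ~[100, INT_MAX], so MIN is 100
   and PROBABLE_MAX_SIZE becomes 100 - 1 = 99, guessing that N is not
   actually negative.  */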
3154
3155 /* Helper function to do the actual work for expand_builtin_memcpy. */
3156
3157 static rtx
3158 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3159 {
3160 const char *src_str;
3161 unsigned int src_align = get_pointer_alignment (src);
3162 unsigned int dest_align = get_pointer_alignment (dest);
3163 rtx dest_mem, src_mem, dest_addr, len_rtx;
3164 HOST_WIDE_INT expected_size = -1;
3165 unsigned int expected_align = 0;
3166 unsigned HOST_WIDE_INT min_size;
3167 unsigned HOST_WIDE_INT max_size;
3168 unsigned HOST_WIDE_INT probable_max_size;
3169
3170 /* If DEST is not a pointer type, call the normal function. */
3171 if (dest_align == 0)
3172 return NULL_RTX;
3173
3174	  /* If SRC is not a pointer type, don't do this
3175	     operation in-line.  */
3176 if (src_align == 0)
3177 return NULL_RTX;
3178
3179 if (currently_expanding_gimple_stmt)
3180 stringop_block_profile (currently_expanding_gimple_stmt,
3181 &expected_align, &expected_size);
3182
3183 if (expected_align < dest_align)
3184 expected_align = dest_align;
3185 dest_mem = get_memory_rtx (dest, len);
3186 set_mem_align (dest_mem, dest_align);
3187 len_rtx = expand_normal (len);
3188 determine_block_size (len, len_rtx, &min_size, &max_size,
3189 &probable_max_size);
3190 src_str = c_getstr (src);
3191
3192 /* If SRC is a string constant and block move would be done
3193 by pieces, we can avoid loading the string from memory
3194	     and only store the computed constants.  */
3195 if (src_str
3196 && CONST_INT_P (len_rtx)
3197 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3198 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3199 CONST_CAST (char *, src_str),
3200 dest_align, false))
3201 {
3202 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3203 builtin_memcpy_read_str,
3204 CONST_CAST (char *, src_str),
3205 dest_align, false, 0);
3206 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3207 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3208 return dest_mem;
3209 }
3210
3211 src_mem = get_memory_rtx (src, len);
3212 set_mem_align (src_mem, src_align);
3213
3214 /* Copy word part most expediently. */
3215 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3216 CALL_EXPR_TAILCALL (exp)
3217 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3218 expected_align, expected_size,
3219 min_size, max_size, probable_max_size);
3220
3221 if (dest_addr == 0)
3222 {
3223 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3224 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3225 }
3226
3227 return dest_addr;
3228 }
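
/* An illustrative instance of the store-by-pieces shortcut above:
   for

     memcpy (dst, "ab", 3);

   the bytes 'a', 'b', '\0' are read from the string constant by
   builtin_memcpy_read_str and stored as immediate data, so the string
   is never loaded from memory at run time.  */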
3229
3230 /* Expand a call EXP to the memcpy builtin.
3231	   Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3234
3235 static rtx
3236 expand_builtin_memcpy (tree exp, rtx target)
3237 {
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 return NULL_RTX;
3241 else
3242 {
3243 tree dest = CALL_EXPR_ARG (exp, 0);
3244 tree src = CALL_EXPR_ARG (exp, 1);
3245 tree len = CALL_EXPR_ARG (exp, 2);
3246 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3247 }
3248 }
3249
3250 /* Expand an instrumented call EXP to the memcpy builtin.
3251	   Return NULL_RTX if we failed; the caller should emit a normal call,
3252 otherwise try to get the result in TARGET, if convenient (and in
3253 mode MODE if that's convenient). */
3254
3255 static rtx
3256 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3257 {
3258 if (!validate_arglist (exp,
3259 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3260 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3261 INTEGER_TYPE, VOID_TYPE))
3262 return NULL_RTX;
3263 else
3264 {
3265 tree dest = CALL_EXPR_ARG (exp, 0);
3266 tree src = CALL_EXPR_ARG (exp, 2);
3267 tree len = CALL_EXPR_ARG (exp, 4);
3268 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3269
3270 /* Return src bounds with the result. */
3271 if (res)
3272 {
3273 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3274 expand_normal (CALL_EXPR_ARG (exp, 1)));
3275 res = chkp_join_splitted_slot (res, bnd);
3276 }
3277 return res;
3278 }
3279 }
3280
3281 /* Expand a call EXP to the mempcpy builtin.
3282 Return NULL_RTX if we failed; the caller should emit a normal call,
3283 otherwise try to get the result in TARGET, if convenient (and in
3284 mode MODE if that's convenient). If ENDP is 0 return the
3285 destination pointer, if ENDP is 1 return the end pointer ala
3286 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3287 stpcpy. */
3288
3289 static rtx
3290 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3291 {
3292 if (!validate_arglist (exp,
3293 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3294 return NULL_RTX;
3295 else
3296 {
3297 tree dest = CALL_EXPR_ARG (exp, 0);
3298 tree src = CALL_EXPR_ARG (exp, 1);
3299 tree len = CALL_EXPR_ARG (exp, 2);
3300 return expand_builtin_mempcpy_args (dest, src, len,
3301 target, mode, /*endp=*/ 1,
3302 exp);
3303 }
3304 }
3305
3306 /* Expand an instrumented call EXP to the mempcpy builtin.
3307	   Return NULL_RTX if we failed; the caller should emit a normal call,
3308 otherwise try to get the result in TARGET, if convenient (and in
3309 mode MODE if that's convenient). */
3310
3311 static rtx
3312 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3313 {
3314 if (!validate_arglist (exp,
3315 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3316 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3317 INTEGER_TYPE, VOID_TYPE))
3318 return NULL_RTX;
3319 else
3320 {
3321 tree dest = CALL_EXPR_ARG (exp, 0);
3322 tree src = CALL_EXPR_ARG (exp, 2);
3323 tree len = CALL_EXPR_ARG (exp, 4);
3324 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3325 mode, 1, exp);
3326
3327 /* Return src bounds with the result. */
3328 if (res)
3329 {
3330 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3331 expand_normal (CALL_EXPR_ARG (exp, 1)));
3332 res = chkp_join_splitted_slot (res, bnd);
3333 }
3334 return res;
3335 }
3336 }
3337
3338 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3339 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3340 so that this can also be called without constructing an actual CALL_EXPR.
3341 The other arguments and return value are the same as for
3342 expand_builtin_mempcpy. */
3343
3344 static rtx
3345 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3346 rtx target, machine_mode mode, int endp,
3347 tree orig_exp)
3348 {
3349 tree fndecl = get_callee_fndecl (orig_exp);
3350
3351 /* If return value is ignored, transform mempcpy into memcpy. */
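	  /* E.g. (illustrative user code): '(void) mempcpy (d, s, n);' can be
	     emitted as 'memcpy (d, s, n)', since only the copy itself is
	     observable.  */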
3352 if (target == const0_rtx
3353 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3354 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3355 {
3356 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3357 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3358 dest, src, len);
3359 return expand_expr (result, target, mode, EXPAND_NORMAL);
3360 }
3361 else if (target == const0_rtx
3362 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3363 {
3364 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3365 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3366 dest, src, len);
3367 return expand_expr (result, target, mode, EXPAND_NORMAL);
3368 }
3369 else
3370 {
3371 const char *src_str;
3372 unsigned int src_align = get_pointer_alignment (src);
3373 unsigned int dest_align = get_pointer_alignment (dest);
3374 rtx dest_mem, src_mem, len_rtx;
3375
3376 /* If either SRC or DEST is not a pointer type, don't do this
3377 operation in-line. */
3378 if (dest_align == 0 || src_align == 0)
3379 return NULL_RTX;
3380
3381 /* If LEN is not constant, call the normal function. */
3382 if (! tree_fits_uhwi_p (len))
3383 return NULL_RTX;
3384
3385 len_rtx = expand_normal (len);
3386 src_str = c_getstr (src);
3387
3388 /* If SRC is a string constant and block move would be done
3389 by pieces, we can avoid loading the string from memory
3390 	 and only store the computed constants.  */
3391 if (src_str
3392 && CONST_INT_P (len_rtx)
3393 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3394 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3395 CONST_CAST (char *, src_str),
3396 dest_align, false))
3397 {
3398 dest_mem = get_memory_rtx (dest, len);
3399 set_mem_align (dest_mem, dest_align);
3400 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3401 builtin_memcpy_read_str,
3402 CONST_CAST (char *, src_str),
3403 dest_align, false, endp);
3404 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3405 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3406 return dest_mem;
3407 }
3408
3409 if (CONST_INT_P (len_rtx)
3410 && can_move_by_pieces (INTVAL (len_rtx),
3411 MIN (dest_align, src_align)))
3412 {
3413 dest_mem = get_memory_rtx (dest, len);
3414 set_mem_align (dest_mem, dest_align);
3415 src_mem = get_memory_rtx (src, len);
3416 set_mem_align (src_mem, src_align);
3417 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3418 MIN (dest_align, src_align), endp);
3419 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3420 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3421 return dest_mem;
3422 }
3423
3424 return NULL_RTX;
3425 }
3426 }
3427
3428 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX
3429    if we failed; the caller should emit a normal call.  Otherwise try to
3430 get the result in TARGET, if convenient. If ENDP is 0 return the
3431 destination pointer, if ENDP is 1 return the end pointer ala
3432 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3433 stpcpy. */
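
/* Illustrative sketch (not from the sources): when copying the four
   bytes of "abc" (three characters plus the NUL) to DEST, the three
   ENDP conventions return:

     endp == 0: DEST          strcpy-style
     endp == 1: DEST + 4      mempcpy-style, one past the NUL
     endp == 2: DEST + 3      stpcpy-style, at the NUL  */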
3434
3435 static rtx
3436 expand_movstr (tree dest, tree src, rtx target, int endp)
3437 {
3438 struct expand_operand ops[3];
3439 rtx dest_mem;
3440 rtx src_mem;
3441
3442 if (!targetm.have_movstr ())
3443 return NULL_RTX;
3444
3445 dest_mem = get_memory_rtx (dest, NULL);
3446 src_mem = get_memory_rtx (src, NULL);
3447 if (!endp)
3448 {
3449 target = force_reg (Pmode, XEXP (dest_mem, 0));
3450 dest_mem = replace_equiv_address (dest_mem, target);
3451 }
3452
3453 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3454 create_fixed_operand (&ops[1], dest_mem);
3455 create_fixed_operand (&ops[2], src_mem);
3456 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3457 return NULL_RTX;
3458
3459 if (endp && target != const0_rtx)
3460 {
3461 target = ops[0].value;
3462 /* movstr is supposed to set end to the address of the NUL
3463 terminator. If the caller requested a mempcpy-like return value,
3464 adjust it. */
3465 if (endp == 1)
3466 {
3467 rtx tem = plus_constant (GET_MODE (target),
3468 gen_lowpart (GET_MODE (target), target), 1);
3469 emit_move_insn (target, force_operand (tem, NULL_RTX));
3470 }
3471 }
3472 return target;
3473 }
3474
3475 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3476    NULL_RTX if we failed; the caller should emit a normal call.
3477    Otherwise try to get the result in TARGET, if that is
3478    convenient.  */
3479
3480 static rtx
3481 expand_builtin_strcpy (tree exp, rtx target)
3482 {
3483 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3484 {
3485 tree dest = CALL_EXPR_ARG (exp, 0);
3486 tree src = CALL_EXPR_ARG (exp, 1);
3487 return expand_builtin_strcpy_args (dest, src, target);
3488 }
3489 return NULL_RTX;
3490 }
3491
3492 /* Helper function to do the actual work for expand_builtin_strcpy. The
3493 arguments to the builtin_strcpy call DEST and SRC are broken out
3494 so that this can also be called without constructing an actual CALL_EXPR.
3495 The other arguments and return value are the same as for
3496 expand_builtin_strcpy. */
3497
3498 static rtx
3499 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3500 {
3501 return expand_movstr (dest, src, target, /*endp=*/0);
3502 }
3503
3504 /* Expand a call EXP to the stpcpy builtin.
3505    Return NULL_RTX if we failed; the caller should emit a normal call.
3506    Otherwise try to get the result in TARGET, if convenient (and in
3507 mode MODE if that's convenient). */
3508
3509 static rtx
3510 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3511 {
3512 tree dst, src;
3513 location_t loc = EXPR_LOCATION (exp);
3514
3515 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3516 return NULL_RTX;
3517
3518 dst = CALL_EXPR_ARG (exp, 0);
3519 src = CALL_EXPR_ARG (exp, 1);
3520
3521 /* If return value is ignored, transform stpcpy into strcpy. */
3522 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3523 {
3524 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3525 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3526 return expand_expr (result, target, mode, EXPAND_NORMAL);
3527 }
3528 else
3529 {
3530 tree len, lenp1;
3531 rtx ret;
3532
3533 /* Ensure we get an actual string whose length can be evaluated at
3534 compile-time, not an expression containing a string. This is
3535 because the latter will potentially produce pessimized code
3536 	 when used to compute the return value.  */
3537 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3538 return expand_movstr (dst, src, target, /*endp=*/2);
3539
3540 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3541 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3542 target, mode, /*endp=*/2,
3543 exp);
3544
3545 if (ret)
3546 return ret;
3547
3548 if (TREE_CODE (len) == INTEGER_CST)
3549 {
3550 rtx len_rtx = expand_normal (len);
3551
3552 if (CONST_INT_P (len_rtx))
3553 {
3554 ret = expand_builtin_strcpy_args (dst, src, target);
3555
3556 if (ret)
3557 {
3558 if (! target)
3559 {
3560 if (mode != VOIDmode)
3561 target = gen_reg_rtx (mode);
3562 else
3563 target = gen_reg_rtx (GET_MODE (ret));
3564 }
3565 if (GET_MODE (target) != GET_MODE (ret))
3566 ret = gen_lowpart (GET_MODE (target), ret);
3567
3568 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3569 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3570 gcc_assert (ret);
3571
3572 return target;
3573 }
3574 }
3575 }
3576
3577 return expand_movstr (dst, src, target, /*endp=*/2);
3578 }
3579 }
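
/* A minimal sketch of the transformation above (hypothetical user
   code, assuming a literal source string):

     char buf[4];
     char *p = stpcpy (buf, "abc");

   expands as a four-byte mempcpy-style copy with endp == 2, so
   p == buf + 3, the address of the NUL.  When the result is unused,
   the call degrades to a plain strcpy.  */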
3580
3581 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3582 bytes from constant string DATA + OFFSET and return it as target
3583 constant. */
3584
3585 rtx
3586 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3587 machine_mode mode)
3588 {
3589 const char *str = (const char *) data;
3590
3591 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3592 return const0_rtx;
3593
3594 return c_readstr (str + offset, mode);
3595 }
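
/* Illustrative behavior (assuming a 4-byte MODE): for DATA == "ab",
   offset 0 reads the bytes 'a', 'b', 0, 0, since c_readstr zero-fills
   past the terminator, and any offset beyond strlen ("ab") yields
   const0_rtx; both supply the zero padding strncpy requires.  */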
3596
3597 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3598    NULL_RTX if we failed; the caller should emit a normal call.  */
3599
3600 static rtx
3601 expand_builtin_strncpy (tree exp, rtx target)
3602 {
3603 location_t loc = EXPR_LOCATION (exp);
3604
3605 if (validate_arglist (exp,
3606 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3607 {
3608 tree dest = CALL_EXPR_ARG (exp, 0);
3609 tree src = CALL_EXPR_ARG (exp, 1);
3610 tree len = CALL_EXPR_ARG (exp, 2);
3611 tree slen = c_strlen (src, 1);
3612
3613 /* We must be passed a constant len and src parameter. */
3614 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3615 return NULL_RTX;
3616
3617 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3618
3619 /* We're required to pad with trailing zeros if the requested
3620 len is greater than strlen(s2)+1. In that case try to
3621 	 use store_by_pieces; if that fails, punt.  */
3622 if (tree_int_cst_lt (slen, len))
3623 {
3624 unsigned int dest_align = get_pointer_alignment (dest);
3625 const char *p = c_getstr (src);
3626 rtx dest_mem;
3627
3628 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3629 || !can_store_by_pieces (tree_to_uhwi (len),
3630 builtin_strncpy_read_str,
3631 CONST_CAST (char *, p),
3632 dest_align, false))
3633 return NULL_RTX;
3634
3635 dest_mem = get_memory_rtx (dest, len);
3636 store_by_pieces (dest_mem, tree_to_uhwi (len),
3637 builtin_strncpy_read_str,
3638 CONST_CAST (char *, p), dest_align, false, 0);
3639 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3640 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3641 return dest_mem;
3642 }
3643 }
3644 return NULL_RTX;
3645 }
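
/* Illustrative example (hypothetical user code) of the padding case
   handled above:

     char buf[8];
     strncpy (buf, "ab", 8);

   Here len 8 exceeds strlen ("ab") + 1 == 3, so the store_by_pieces
   path writes 'a', 'b' followed by six NUL bytes.  */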
3646
3647 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3648 bytes from constant string DATA + OFFSET and return it as target
3649 constant. */
3650
3651 rtx
3652 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3653 machine_mode mode)
3654 {
3655 const char *c = (const char *) data;
3656 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3657
3658 memset (p, *c, GET_MODE_SIZE (mode));
3659
3660 return c_readstr (p, mode);
3661 }
3662
3663 /* Callback routine for store_by_pieces. Return the RTL of a register
3664 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3665 char value given in the RTL register data. For example, if mode is
3666 4 bytes wide, return the RTL for 0x01010101*data. */
3667
3668 static rtx
3669 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3670 machine_mode mode)
3671 {
3672 rtx target, coeff;
3673 size_t size;
3674 char *p;
3675
3676 size = GET_MODE_SIZE (mode);
3677 if (size == 1)
3678 return (rtx) data;
3679
3680 p = XALLOCAVEC (char, size);
3681 memset (p, 1, size);
3682 coeff = c_readstr (p, mode);
3683
3684 target = convert_to_mode (mode, (rtx) data, 1);
3685 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3686 return force_reg (mode, target);
3687 }
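
/* Worked example (illustrative, 4-byte mode): for a run-time byte
   value DATA == 0x5A, COEFF is read from "\1\1\1\1" as 0x01010101,
   and the multiplication yields 0x5A5A5A5A, i.e. the byte replicated
   across the word.  */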
3688
3689 /* Expand expression EXP, which is a call to the memset builtin. Return
3690    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3691 try to get the result in TARGET, if convenient (and in mode MODE if that's
3692 convenient). */
3693
3694 static rtx
3695 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3696 {
3697 if (!validate_arglist (exp,
3698 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3699 return NULL_RTX;
3700 else
3701 {
3702 tree dest = CALL_EXPR_ARG (exp, 0);
3703 tree val = CALL_EXPR_ARG (exp, 1);
3704 tree len = CALL_EXPR_ARG (exp, 2);
3705 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3706 }
3707 }
3708
3709 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3710    Return NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3711 try to get the result in TARGET, if convenient (and in mode MODE if that's
3712 convenient). */
3713
3714 static rtx
3715 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3716 {
3717 if (!validate_arglist (exp,
3718 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3719 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3720 return NULL_RTX;
3721 else
3722 {
3723 tree dest = CALL_EXPR_ARG (exp, 0);
3724 tree val = CALL_EXPR_ARG (exp, 2);
3725 tree len = CALL_EXPR_ARG (exp, 3);
3726 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3727
3728 /* Return src bounds with the result. */
3729 if (res)
3730 {
3731 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3732 expand_normal (CALL_EXPR_ARG (exp, 1)));
3733 res = chkp_join_splitted_slot (res, bnd);
3734 }
3735 return res;
3736 }
3737 }
3738
3739 /* Helper function to do the actual work for expand_builtin_memset. The
3740 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3741 so that this can also be called without constructing an actual CALL_EXPR.
3742 The other arguments and return value are the same as for
3743 expand_builtin_memset. */
3744
3745 static rtx
3746 expand_builtin_memset_args (tree dest, tree val, tree len,
3747 rtx target, machine_mode mode, tree orig_exp)
3748 {
3749 tree fndecl, fn;
3750 enum built_in_function fcode;
3751 machine_mode val_mode;
3752 char c;
3753 unsigned int dest_align;
3754 rtx dest_mem, dest_addr, len_rtx;
3755 HOST_WIDE_INT expected_size = -1;
3756 unsigned int expected_align = 0;
3757 unsigned HOST_WIDE_INT min_size;
3758 unsigned HOST_WIDE_INT max_size;
3759 unsigned HOST_WIDE_INT probable_max_size;
3760
3761 dest_align = get_pointer_alignment (dest);
3762
3763 /* If DEST is not a pointer type, don't do this operation in-line. */
3764 if (dest_align == 0)
3765 return NULL_RTX;
3766
3767 if (currently_expanding_gimple_stmt)
3768 stringop_block_profile (currently_expanding_gimple_stmt,
3769 &expected_align, &expected_size);
3770
3771 if (expected_align < dest_align)
3772 expected_align = dest_align;
3773
3774 /* If the LEN parameter is zero, return DEST. */
3775 if (integer_zerop (len))
3776 {
3777 /* Evaluate and ignore VAL in case it has side-effects. */
3778 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3779 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3780 }
3781
3782 /* Stabilize the arguments in case we fail. */
3783 dest = builtin_save_expr (dest);
3784 val = builtin_save_expr (val);
3785 len = builtin_save_expr (len);
3786
3787 len_rtx = expand_normal (len);
3788 determine_block_size (len, len_rtx, &min_size, &max_size,
3789 &probable_max_size);
3790 dest_mem = get_memory_rtx (dest, len);
3791 val_mode = TYPE_MODE (unsigned_char_type_node);
3792
3793 if (TREE_CODE (val) != INTEGER_CST)
3794 {
3795 rtx val_rtx;
3796
3797 val_rtx = expand_normal (val);
3798 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3799
3800       /* Assume that we can memset by pieces if we can store
3801 	 the coefficients by pieces (in the required modes).
3802 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3803 c = 1;
3804 if (tree_fits_uhwi_p (len)
3805 && can_store_by_pieces (tree_to_uhwi (len),
3806 builtin_memset_read_str, &c, dest_align,
3807 true))
3808 {
3809 val_rtx = force_reg (val_mode, val_rtx);
3810 store_by_pieces (dest_mem, tree_to_uhwi (len),
3811 builtin_memset_gen_str, val_rtx, dest_align,
3812 true, 0);
3813 }
3814 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3815 dest_align, expected_align,
3816 expected_size, min_size, max_size,
3817 probable_max_size))
3818 goto do_libcall;
3819
3820 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3821 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3822 return dest_mem;
3823 }
3824
3825 if (target_char_cast (val, &c))
3826 goto do_libcall;
3827
3828 if (c)
3829 {
3830 if (tree_fits_uhwi_p (len)
3831 && can_store_by_pieces (tree_to_uhwi (len),
3832 builtin_memset_read_str, &c, dest_align,
3833 true))
3834 store_by_pieces (dest_mem, tree_to_uhwi (len),
3835 builtin_memset_read_str, &c, dest_align, true, 0);
3836 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3837 gen_int_mode (c, val_mode),
3838 dest_align, expected_align,
3839 expected_size, min_size, max_size,
3840 probable_max_size))
3841 goto do_libcall;
3842
3843 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3844 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3845 return dest_mem;
3846 }
3847
3848 set_mem_align (dest_mem, dest_align);
3849 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3850 CALL_EXPR_TAILCALL (orig_exp)
3851 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3852 expected_align, expected_size,
3853 min_size, max_size,
3854 probable_max_size);
3855
3856 if (dest_addr == 0)
3857 {
3858 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3859 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3860 }
3861
3862 return dest_addr;
3863
3864 do_libcall:
3865 fndecl = get_callee_fndecl (orig_exp);
3866 fcode = DECL_FUNCTION_CODE (fndecl);
3867 if (fcode == BUILT_IN_MEMSET
3868 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3869 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3870 dest, val, len);
3871 else if (fcode == BUILT_IN_BZERO)
3872 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3873 dest, len);
3874 else
3875 gcc_unreachable ();
3876 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3877 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3878 return expand_call (fn, target, target == const0_rtx);
3879 }
3880
3881 /* Expand expression EXP, which is a call to the bzero builtin. Return
3882    NULL_RTX if we failed; the caller should emit a normal call.  */
3883
3884 static rtx
3885 expand_builtin_bzero (tree exp)
3886 {
3887 tree dest, size;
3888 location_t loc = EXPR_LOCATION (exp);
3889
3890 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3891 return NULL_RTX;
3892
3893 dest = CALL_EXPR_ARG (exp, 0);
3894 size = CALL_EXPR_ARG (exp, 1);
3895
3896 /* New argument list transforming bzero(ptr x, int y) to
3897      memset(ptr x, int 0, size_t y).  It is done this way
3898      so that if it isn't expanded inline, we fall back to
3899 calling bzero instead of memset. */
3900
3901 return expand_builtin_memset_args (dest, integer_zero_node,
3902 fold_convert_loc (loc,
3903 size_type_node, size),
3904 const0_rtx, VOIDmode, exp);
3905 }
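
/* Source-level sketch of the rewrite above (hypothetical user code):

     bzero (buf, n);

   is expanded as if it were

     memset (buf, 0, (size_t) n);

   with const0_rtx as the target, so the memset return value is
   ignored.  */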
3906
3907 /* Try to expand cmpstr operation ICODE with the given operands.
3908 Return the result rtx on success, otherwise return null. */
3909
3910 static rtx
3911 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3912 HOST_WIDE_INT align)
3913 {
3914 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3915
3916 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3917 target = NULL_RTX;
3918
3919 struct expand_operand ops[4];
3920 create_output_operand (&ops[0], target, insn_mode);
3921 create_fixed_operand (&ops[1], arg1_rtx);
3922 create_fixed_operand (&ops[2], arg2_rtx);
3923 create_integer_operand (&ops[3], align);
3924 if (maybe_expand_insn (icode, 4, ops))
3925 return ops[0].value;
3926 return NULL_RTX;
3927 }
3928
3929 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3930 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3931 otherwise return null. */
3932
3933 static rtx
3934 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3935 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3936 HOST_WIDE_INT align)
3937 {
3938 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3939
3940 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3941 target = NULL_RTX;
3942
3943 struct expand_operand ops[5];
3944 create_output_operand (&ops[0], target, insn_mode);
3945 create_fixed_operand (&ops[1], arg1_rtx);
3946 create_fixed_operand (&ops[2], arg2_rtx);
3947 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3948 TYPE_UNSIGNED (arg3_type));
3949 create_integer_operand (&ops[4], align);
3950 if (maybe_expand_insn (icode, 5, ops))
3951 return ops[0].value;
3952 return NULL_RTX;
3953 }
3954
3955 /* Expand expression EXP, which is a call to the memcmp built-in function.
3956 Return NULL_RTX if we failed and the caller should emit a normal call,
3957 otherwise try to get the result in TARGET, if convenient. */
3958
3959 static rtx
3960 expand_builtin_memcmp (tree exp, rtx target)
3961 {
3962 if (!validate_arglist (exp,
3963 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3964 return NULL_RTX;
3965
3966 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3967 implementing memcmp because it will stop if it encounters two
3968 zero bytes. */
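  /* Illustrative case: memcmp ("a\0x", "a\0y", 3) must inspect the
     third byte and return nonzero, whereas a cmpstrn-style comparison
     would stop at the embedded NULs and wrongly report equality.  */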
3969 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3970 if (icode == CODE_FOR_nothing)
3971 return NULL_RTX;
3972
3973 tree arg1 = CALL_EXPR_ARG (exp, 0);
3974 tree arg2 = CALL_EXPR_ARG (exp, 1);
3975 tree len = CALL_EXPR_ARG (exp, 2);
3976
3977 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3978 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3979
3980 /* If we don't have POINTER_TYPE, call the function. */
3981 if (arg1_align == 0 || arg2_align == 0)
3982 return NULL_RTX;
3983
3984 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3985 location_t loc = EXPR_LOCATION (exp);
3986 rtx arg1_rtx = get_memory_rtx (arg1, len);
3987 rtx arg2_rtx = get_memory_rtx (arg2, len);
3988 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3989
3990 /* Set MEM_SIZE as appropriate. */
3991 if (CONST_INT_P (arg3_rtx))
3992 {
3993 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3994 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3995 }
3996
3997 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3998 TREE_TYPE (len), arg3_rtx,
3999 MIN (arg1_align, arg2_align));
4000 if (result)
4001 {
4002 /* Return the value in the proper mode for this function. */
4003 if (GET_MODE (result) == mode)
4004 return result;
4005
4006 if (target != 0)
4007 {
4008 convert_move (target, result, 0);
4009 return target;
4010 }
4011
4012 return convert_to_mode (mode, result, 0);
4013 }
4014
4015 result = target;
4016 if (! (result != 0
4017 && REG_P (result) && GET_MODE (result) == mode
4018 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4019 result = gen_reg_rtx (mode);
4020
4021 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4022 TYPE_MODE (integer_type_node), 3,
4023 XEXP (arg1_rtx, 0), Pmode,
4024 XEXP (arg2_rtx, 0), Pmode,
4025 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4026 TYPE_UNSIGNED (sizetype)),
4027 TYPE_MODE (sizetype));
4028 return result;
4029 }
4030
4031 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4032    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4033    try to get the result in TARGET, if convenient.  */
4034
4035 static rtx
4036 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4037 {
4038 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4039 return NULL_RTX;
4040
4041 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4042 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4043 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4044 {
4045 rtx arg1_rtx, arg2_rtx;
4046 tree fndecl, fn;
4047 tree arg1 = CALL_EXPR_ARG (exp, 0);
4048 tree arg2 = CALL_EXPR_ARG (exp, 1);
4049 rtx result = NULL_RTX;
4050
4051 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4052 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4053
4054 /* If we don't have POINTER_TYPE, call the function. */
4055 if (arg1_align == 0 || arg2_align == 0)
4056 return NULL_RTX;
4057
4058 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4059 arg1 = builtin_save_expr (arg1);
4060 arg2 = builtin_save_expr (arg2);
4061
4062 arg1_rtx = get_memory_rtx (arg1, NULL);
4063 arg2_rtx = get_memory_rtx (arg2, NULL);
4064
4065 /* Try to call cmpstrsi. */
4066 if (cmpstr_icode != CODE_FOR_nothing)
4067 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4068 MIN (arg1_align, arg2_align));
4069
4070 /* Try to determine at least one length and call cmpstrnsi. */
4071 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4072 {
4073 tree len;
4074 rtx arg3_rtx;
4075
4076 tree len1 = c_strlen (arg1, 1);
4077 tree len2 = c_strlen (arg2, 1);
4078
4079 if (len1)
4080 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4081 if (len2)
4082 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4083
4084 /* If we don't have a constant length for the first, use the length
4085 of the second, if we know it. We don't require a constant for
4086 this case; some cost analysis could be done if both are available
4087 but neither is constant. For now, assume they're equally cheap,
4088 unless one has side effects. If both strings have constant lengths,
4089 use the smaller. */
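      /* E.g. (illustrative): in strcmp (s, "abc"), len2 is the constant 4,
	 so the comparison can be emitted as cmpstrnsi with length 4 even
	 though the length of S is unknown.  */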
4090
4091 if (!len1)
4092 len = len2;
4093 else if (!len2)
4094 len = len1;
4095 else if (TREE_SIDE_EFFECTS (len1))
4096 len = len2;
4097 else if (TREE_SIDE_EFFECTS (len2))
4098 len = len1;
4099 else if (TREE_CODE (len1) != INTEGER_CST)
4100 len = len2;
4101 else if (TREE_CODE (len2) != INTEGER_CST)
4102 len = len1;
4103 else if (tree_int_cst_lt (len1, len2))
4104 len = len1;
4105 else
4106 len = len2;
4107
4108 /* If both arguments have side effects, we cannot optimize. */
4109 if (len && !TREE_SIDE_EFFECTS (len))
4110 {
4111 arg3_rtx = expand_normal (len);
4112 result = expand_cmpstrn_or_cmpmem
4113 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4114 arg3_rtx, MIN (arg1_align, arg2_align));
4115 }
4116 }
4117
4118 if (result)
4119 {
4120 /* Return the value in the proper mode for this function. */
4121 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4122 if (GET_MODE (result) == mode)
4123 return result;
4124 if (target == 0)
4125 return convert_to_mode (mode, result, 0);
4126 convert_move (target, result, 0);
4127 return target;
4128 }
4129
4130 /* Expand the library call ourselves using a stabilized argument
4131 list to avoid re-evaluating the function's arguments twice. */
4132 fndecl = get_callee_fndecl (exp);
4133 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4134 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4135 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4136 return expand_call (fn, target, target == const0_rtx);
4137 }
4138 return NULL_RTX;
4139 }
4140
4141 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4142    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
4143 the result in TARGET, if convenient. */
4144
4145 static rtx
4146 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4147 ATTRIBUTE_UNUSED machine_mode mode)
4148 {
4149 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4150
4151 if (!validate_arglist (exp,
4152 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4153 return NULL_RTX;
4154
4155 /* If c_strlen can determine an expression for one of the string
4156 lengths, and it doesn't have side effects, then emit cmpstrnsi
4157 using length MIN(strlen(string)+1, arg3). */
4158 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4159 if (cmpstrn_icode != CODE_FOR_nothing)
4160 {
4161 tree len, len1, len2;
4162 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4163 rtx result;
4164 tree fndecl, fn;
4165 tree arg1 = CALL_EXPR_ARG (exp, 0);
4166 tree arg2 = CALL_EXPR_ARG (exp, 1);
4167 tree arg3 = CALL_EXPR_ARG (exp, 2);
4168
4169 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4170 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4171
4172 len1 = c_strlen (arg1, 1);
4173 len2 = c_strlen (arg2, 1);
4174
4175 if (len1)
4176 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4177 if (len2)
4178 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4179
4180 /* If we don't have a constant length for the first, use the length
4181 of the second, if we know it. We don't require a constant for
4182 this case; some cost analysis could be done if both are available
4183 but neither is constant. For now, assume they're equally cheap,
4184 unless one has side effects. If both strings have constant lengths,
4185 use the smaller. */
4186
4187 if (!len1)
4188 len = len2;
4189 else if (!len2)
4190 len = len1;
4191 else if (TREE_SIDE_EFFECTS (len1))
4192 len = len2;
4193 else if (TREE_SIDE_EFFECTS (len2))
4194 len = len1;
4195 else if (TREE_CODE (len1) != INTEGER_CST)
4196 len = len2;
4197 else if (TREE_CODE (len2) != INTEGER_CST)
4198 len = len1;
4199 else if (tree_int_cst_lt (len1, len2))
4200 len = len1;
4201 else
4202 len = len2;
4203
4204 /* If both arguments have side effects, we cannot optimize. */
4205 if (!len || TREE_SIDE_EFFECTS (len))
4206 return NULL_RTX;
4207
4208 /* The actual new length parameter is MIN(len,arg3). */
4209 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4210 fold_convert_loc (loc, TREE_TYPE (len), arg3));
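
    /* E.g. (illustrative): for strncmp (s, "abc", 10) this yields
       MIN (4, 10) == 4, so at most four bytes are compared.  */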
4211
4212 /* If we don't have POINTER_TYPE, call the function. */
4213 if (arg1_align == 0 || arg2_align == 0)
4214 return NULL_RTX;
4215
4216 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4217 arg1 = builtin_save_expr (arg1);
4218 arg2 = builtin_save_expr (arg2);
4219 len = builtin_save_expr (len);
4220
4221 arg1_rtx = get_memory_rtx (arg1, len);
4222 arg2_rtx = get_memory_rtx (arg2, len);
4223 arg3_rtx = expand_normal (len);
4224 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4225 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4226 MIN (arg1_align, arg2_align));
4227 if (result)
4228 {
4229 /* Return the value in the proper mode for this function. */
4230 mode = TYPE_MODE (TREE_TYPE (exp));
4231 if (GET_MODE (result) == mode)
4232 return result;
4233 if (target == 0)
4234 return convert_to_mode (mode, result, 0);
4235 convert_move (target, result, 0);
4236 return target;
4237 }
4238
4239 /* Expand the library call ourselves using a stabilized argument
4240 list to avoid re-evaluating the function's arguments twice. */
4241 fndecl = get_callee_fndecl (exp);
4242 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4243 arg1, arg2, len);
4244 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4245 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4246 return expand_call (fn, target, target == const0_rtx);
4247 }
4248 return NULL_RTX;
4249 }
4250
4251 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4252 if that's convenient. */
4253
4254 rtx
4255 expand_builtin_saveregs (void)
4256 {
4257 rtx val;
4258 rtx_insn *seq;
4259
4260 /* Don't do __builtin_saveregs more than once in a function.
4261 Save the result of the first call and reuse it. */
4262 if (saveregs_value != 0)
4263 return saveregs_value;
4264
4265 /* When this function is called, it means that registers must be
4266 saved on entry to this function. So we migrate the call to the
4267 first insn of this function. */
4268
4269 start_sequence ();
4270
4271 /* Do whatever the machine needs done in this case. */
4272 val = targetm.calls.expand_builtin_saveregs ();
4273
4274 seq = get_insns ();
4275 end_sequence ();
4276
4277 saveregs_value = val;
4278
4279 /* Put the insns after the NOTE that starts the function. If this
4280 is inside a start_sequence, make the outer-level insn chain current, so
4281 the code is placed at the start of the function. */
4282 push_topmost_sequence ();
4283 emit_insn_after (seq, entry_of_function ());
4284 pop_topmost_sequence ();
4285
4286 return val;
4287 }
4288
4289 /* Expand a call to __builtin_next_arg. */
4290
4291 static rtx
4292 expand_builtin_next_arg (void)
4293 {
4294 /* Checking arguments is already done in fold_builtin_next_arg
4295 that must be called before this function. */
4296 return expand_binop (ptr_mode, add_optab,
4297 crtl->args.internal_arg_pointer,
4298 crtl->args.arg_offset_rtx,
4299 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4300 }
4301
4302 /* Make it easier for the backends by protecting the valist argument
4303 from multiple evaluations. */
4304
4305 static tree
4306 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4307 {
4308 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4309
4310 /* The current way of determining the type of valist is completely
4311 bogus. We should have the information on the va builtin instead. */
4312 if (!vatype)
4313 vatype = targetm.fn_abi_va_list (cfun->decl);
4314
4315 if (TREE_CODE (vatype) == ARRAY_TYPE)
4316 {
4317 if (TREE_SIDE_EFFECTS (valist))
4318 valist = save_expr (valist);
4319
4320 /* For this case, the backends will be expecting a pointer to
4321 vatype, but it's possible we've actually been given an array
4322 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4323 So fix it. */
4324 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4325 {
4326 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4327 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4328 }
4329 }
4330 else
4331 {
4332 tree pt = build_pointer_type (vatype);
4333
4334 if (! needs_lvalue)
4335 {
4336 if (! TREE_SIDE_EFFECTS (valist))
4337 return valist;
4338
4339 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4340 TREE_SIDE_EFFECTS (valist) = 1;
4341 }
4342
4343 if (TREE_SIDE_EFFECTS (valist))
4344 valist = save_expr (valist);
4345 valist = fold_build2_loc (loc, MEM_REF,
4346 vatype, valist, build_int_cst (pt, 0));
4347 }
4348
4349 return valist;
4350 }
4351
4352 /* The "standard" definition of va_list is void*. */
4353
4354 tree
4355 std_build_builtin_va_list (void)
4356 {
4357 return ptr_type_node;
4358 }
4359
4360 /* The "standard" abi va_list is va_list_type_node. */
4361
4362 tree
4363 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4364 {
4365 return va_list_type_node;
4366 }
4367
4368 /* The "standard" type of va_list is va_list_type_node. */
4369
4370 tree
4371 std_canonical_va_list_type (tree type)
4372 {
4373 tree wtype, htype;
4374
4375 if (INDIRECT_REF_P (type))
4376 type = TREE_TYPE (type);
4377 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4378 type = TREE_TYPE (type);
4379 wtype = va_list_type_node;
4380 htype = type;
4381 /* Treat structure va_list types. */
4382 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4383 htype = TREE_TYPE (htype);
4384 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4385 {
4386 /* If va_list is an array type, the argument may have decayed
4387 to a pointer type, e.g. by being passed to another function.
4388 In that case, unwrap both types so that we can compare the
4389 underlying records. */
4390 if (TREE_CODE (htype) == ARRAY_TYPE
4391 || POINTER_TYPE_P (htype))
4392 {
4393 wtype = TREE_TYPE (wtype);
4394 htype = TREE_TYPE (htype);
4395 }
4396 }
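  /* For instance (illustrative): on targets where va_list is defined as
     'struct __va_list_tag[1]', a va_list passed to another function
     decays to 'struct __va_list_tag *'; stripping one level from both
     WTYPE and HTYPE lets the underlying RECORD_TYPEs compare equal.  */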
4397 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4398 return va_list_type_node;
4399
4400 return NULL_TREE;
4401 }
4402
4403 /* The "standard" implementation of va_start: just assign `nextarg' to
4404 the variable. */
4405
4406 void
4407 std_expand_builtin_va_start (tree valist, rtx nextarg)
4408 {
4409 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4410 convert_move (va_r, nextarg, 0);
4411
4412 /* We do not have any valid bounds for the pointer, so
4413 just store zero bounds for it. */
4414 if (chkp_function_instrumented_p (current_function_decl))
4415 chkp_expand_bounds_reset_for_mem (valist,
4416 make_tree (TREE_TYPE (valist),
4417 nextarg));
4418 }
4419
4420 /* Expand EXP, a call to __builtin_va_start. */
4421
4422 static rtx
4423 expand_builtin_va_start (tree exp)
4424 {
4425 rtx nextarg;
4426 tree valist;
4427 location_t loc = EXPR_LOCATION (exp);
4428
4429 if (call_expr_nargs (exp) < 2)
4430 {
4431 error_at (loc, "too few arguments to function %<va_start%>");
4432 return const0_rtx;
4433 }
4434
4435 if (fold_builtin_next_arg (exp, true))
4436 return const0_rtx;
4437
4438 nextarg = expand_builtin_next_arg ();
4439 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4440
4441 if (targetm.expand_builtin_va_start)
4442 targetm.expand_builtin_va_start (valist, nextarg);
4443 else
4444 std_expand_builtin_va_start (valist, nextarg);
4445
4446 return const0_rtx;
4447 }
4448
4449 /* Expand EXP, a call to __builtin_va_end. */
4450
4451 static rtx
4452 expand_builtin_va_end (tree exp)
4453 {
4454 tree valist = CALL_EXPR_ARG (exp, 0);
4455
4456 /* Evaluate for side effects, if needed. I hate macros that don't
4457 do that. */
4458 if (TREE_SIDE_EFFECTS (valist))
4459 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4460
4461 return const0_rtx;
4462 }
4463
4464 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4465 builtin rather than just as an assignment in stdarg.h because of the
4466 nastiness of array-type va_list types. */
4467
4468 static rtx
4469 expand_builtin_va_copy (tree exp)
4470 {
4471 tree dst, src, t;
4472 location_t loc = EXPR_LOCATION (exp);
4473
4474 dst = CALL_EXPR_ARG (exp, 0);
4475 src = CALL_EXPR_ARG (exp, 1);
4476
4477 dst = stabilize_va_list_loc (loc, dst, 1);
4478 src = stabilize_va_list_loc (loc, src, 0);
4479
4480 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4481
4482 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4483 {
4484 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4485 TREE_SIDE_EFFECTS (t) = 1;
4486 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4487 }
4488 else
4489 {
4490 rtx dstb, srcb, size;
4491
4492 /* Evaluate to pointers. */
4493 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4494 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4495 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4496 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4497
4498 dstb = convert_memory_address (Pmode, dstb);
4499 srcb = convert_memory_address (Pmode, srcb);
4500
4501 /* "Dereference" to BLKmode memories. */
4502 dstb = gen_rtx_MEM (BLKmode, dstb);
4503 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4504 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4505 srcb = gen_rtx_MEM (BLKmode, srcb);
4506 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4507 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4508
4509 /* Copy. */
4510 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4511 }
4512
4513 return const0_rtx;
4514 }
4515
4516 /* Expand a call to one of the builtin functions __builtin_frame_address or
4517 __builtin_return_address. */
4518
4519 static rtx
4520 expand_builtin_frame_address (tree fndecl, tree exp)
4521 {
4522 /* The argument must be a nonnegative integer constant.
4523 It counts the number of frames to scan up the stack.
4524 The value is either the frame pointer value or the return
4525 address saved in that frame. */
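  /* E.g. (illustrative): __builtin_frame_address (0) yields the current
     function's frame pointer, while __builtin_return_address (1) reads
     the return address saved in the caller's frame, if the port can
     reach it.  */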
4526 if (call_expr_nargs (exp) == 0)
4527 /* Warning about missing arg was already issued. */
4528 return const0_rtx;
4529 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4530 {
4531 error ("invalid argument to %qD", fndecl);
4532 return const0_rtx;
4533 }
4534 else
4535 {
4536 /* Number of frames to scan up the stack. */
4537 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4538
4539 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4540
4541 /* Some ports cannot access arbitrary stack frames. */
4542 if (tem == NULL)
4543 {
4544 warning (0, "unsupported argument to %qD", fndecl);
4545 return const0_rtx;
4546 }
4547
4548 if (count)
4549 {
4550 /* Warn since no effort is made to ensure that any frame
4551 beyond the current one exists or can be safely reached. */
4552 warning (OPT_Wframe_address, "calling %qD with "
4553 "a nonzero argument is unsafe", fndecl);
4554 }
4555
4556 /* For __builtin_frame_address, return what we've got. */
4557 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4558 return tem;
4559
4560 if (!REG_P (tem)
4561 && ! CONSTANT_P (tem))
4562 tem = copy_addr_to_reg (tem);
4563 return tem;
4564 }
4565 }
4566
4567 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4568 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4569 is the same as for allocate_dynamic_stack_space. */
4570
4571 static rtx
4572 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4573 {
4574 rtx op0;
4575 rtx result;
4576 bool valid_arglist;
4577 unsigned int align;
4578 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4579 == BUILT_IN_ALLOCA_WITH_ALIGN);
4580
4581 valid_arglist
4582 = (alloca_with_align
4583 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4584 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4585
4586 if (!valid_arglist)
4587 return NULL_RTX;
4588
4589 /* Compute the argument. */
4590 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4591
4592 /* Compute the alignment. */
4593 align = (alloca_with_align
4594 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4595 : BIGGEST_ALIGNMENT);
4596
4597 /* Allocate the desired space. */
4598 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4599 result = convert_memory_address (ptr_mode, result);
4600
4601 return result;
4602 }
4603
4604 /* Expand a call to bswap builtin in EXP.
4605 Return NULL_RTX if a normal call should be emitted rather than expanding the
4606 function in-line. If convenient, the result should be placed in TARGET.
4607 SUBTARGET may be used as the target for computing one of EXP's operands. */
4608
4609 static rtx
4610 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4611 rtx subtarget)
4612 {
4613 tree arg;
4614 rtx op0;
4615
4616 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4617 return NULL_RTX;
4618
4619 arg = CALL_EXPR_ARG (exp, 0);
4620 op0 = expand_expr (arg,
4621 subtarget && GET_MODE (subtarget) == target_mode
4622 ? subtarget : NULL_RTX,
4623 target_mode, EXPAND_NORMAL);
4624 if (GET_MODE (op0) != target_mode)
4625 op0 = convert_to_mode (target_mode, op0, 1);
4626
4627 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4628
4629 gcc_assert (target);
4630
4631 return convert_to_mode (target_mode, target, 1);
4632 }
4633
4634 /* Expand a call to a unary builtin in EXP.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding the
4636 function in-line. If convenient, the result should be placed in TARGET.
4637 SUBTARGET may be used as the target for computing one of EXP's operands. */
4638
4639 static rtx
4640 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4641 rtx subtarget, optab op_optab)
4642 {
4643 rtx op0;
4644
4645 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4646 return NULL_RTX;
4647
4648 /* Compute the argument. */
4649 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4650 (subtarget
4651 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4652 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4653 VOIDmode, EXPAND_NORMAL);
4654 /* Compute op, into TARGET if possible.
4655 Set TARGET to wherever the result comes back. */
4656 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4657 op_optab, op0, target, op_optab != clrsb_optab);
4658 gcc_assert (target);
4659
4660 return convert_to_mode (target_mode, target, 0);
4661 }
4662
4663 /* Expand a call to __builtin_expect. We just return our argument
4664    as the builtin_expect semantics should already have been applied by
4665    the tree branch prediction pass.  */
4666
4667 static rtx
4668 expand_builtin_expect (tree exp, rtx target)
4669 {
4670 tree arg;
4671
4672 if (call_expr_nargs (exp) < 2)
4673 return const0_rtx;
4674 arg = CALL_EXPR_ARG (exp, 0);
4675
4676 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4677   /* When guessing was done, the hints should already be stripped away.  */
4678 gcc_assert (!flag_guess_branch_prob
4679 || optimize == 0 || seen_error ());
4680 return target;
4681 }
4682
4683 /* Expand a call to __builtin_assume_aligned. We just return our first
4684    argument, as the builtin_assume_aligned semantics should already have
4685    been applied by CCP.  */
4686
4687 static rtx
4688 expand_builtin_assume_aligned (tree exp, rtx target)
4689 {
4690 if (call_expr_nargs (exp) < 2)
4691 return const0_rtx;
4692 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4693 EXPAND_NORMAL);
4694 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4695 && (call_expr_nargs (exp) < 3
4696 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4697 return target;
4698 }
4699
4700 void
4701 expand_builtin_trap (void)
4702 {
4703 if (targetm.have_trap ())
4704 {
4705 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4706 /* For trap insns when not accumulating outgoing args force
4707 REG_ARGS_SIZE note to prevent crossjumping of calls with
4708 different args sizes. */
4709 if (!ACCUMULATE_OUTGOING_ARGS)
4710 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4711 }
4712 else
4713 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4714 emit_barrier ();
4715 }
4716
4717 /* Expand a call to __builtin_unreachable. We do nothing except emit
4718 a barrier saying that control flow will not pass here.
4719
4720 It is the responsibility of the program being compiled to ensure
4721    that control flow never reaches __builtin_unreachable.  */
4722 static void
4723 expand_builtin_unreachable (void)
4724 {
4725 emit_barrier ();
4726 }
4727
4728 /* Expand EXP, a call to fabs, fabsf or fabsl.
4729 Return NULL_RTX if a normal call should be emitted rather than expanding
4730 the function inline. If convenient, the result should be placed
4731 in TARGET. SUBTARGET may be used as the target for computing
4732 the operand. */
4733
4734 static rtx
4735 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4736 {
4737 machine_mode mode;
4738 tree arg;
4739 rtx op0;
4740
4741 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4742 return NULL_RTX;
4743
4744 arg = CALL_EXPR_ARG (exp, 0);
4745 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4746 mode = TYPE_MODE (TREE_TYPE (arg));
4747 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4748 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4749 }
4750
4751 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4752    Return NULL if a normal call should be emitted rather than expanding the
4753 function inline. If convenient, the result should be placed in TARGET.
4754 SUBTARGET may be used as the target for computing the operand. */
4755
4756 static rtx
4757 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4758 {
4759 rtx op0, op1;
4760 tree arg;
4761
4762 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4763 return NULL_RTX;
4764
4765 arg = CALL_EXPR_ARG (exp, 0);
4766 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4767
4768 arg = CALL_EXPR_ARG (exp, 1);
4769 op1 = expand_normal (arg);
4770
4771 return expand_copysign (op0, op1, target);
4772 }
4773
4774 /* Expand a call to __builtin___clear_cache. */
4775
4776 static rtx
4777 expand_builtin___clear_cache (tree exp)
4778 {
4779 if (!targetm.code_for_clear_cache)
4780 {
4781 #ifdef CLEAR_INSN_CACHE
4782 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4783 does something. Just do the default expansion to a call to
4784 __clear_cache(). */
4785 return NULL_RTX;
4786 #else
4787 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4788 does nothing. There is no need to call it. Do nothing. */
4789 return const0_rtx;
4790 #endif /* CLEAR_INSN_CACHE */
4791 }
4792
4793 /* We have a "clear_cache" insn, and it will handle everything. */
4794 tree begin, end;
4795 rtx begin_rtx, end_rtx;
4796
4797 /* We must not expand to a library call. If we did, any
4798 fallback library function in libgcc that might contain a call to
4799 __builtin___clear_cache() would recurse infinitely. */
4800 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4801 {
4802 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4803 return const0_rtx;
4804 }
4805
4806 if (targetm.have_clear_cache ())
4807 {
4808 struct expand_operand ops[2];
4809
4810 begin = CALL_EXPR_ARG (exp, 0);
4811 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4812
4813 end = CALL_EXPR_ARG (exp, 1);
4814 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4815
4816 create_address_operand (&ops[0], begin_rtx);
4817 create_address_operand (&ops[1], end_rtx);
4818 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4819 return const0_rtx;
4820 }
4821 return const0_rtx;
4822 }
4823
4824 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4825
4826 static rtx
4827 round_trampoline_addr (rtx tramp)
4828 {
4829 rtx temp, addend, mask;
4830
4831 /* If we don't need too much alignment, we'll have been guaranteed
4832 proper alignment by get_trampoline_type. */
4833 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4834 return tramp;
4835
4836 /* Round address up to desired boundary. */
4837 temp = gen_reg_rtx (Pmode);
4838 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4839 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4840
4841 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4842 temp, 0, OPTAB_LIB_WIDEN);
4843 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4844 temp, 0, OPTAB_LIB_WIDEN);
4845
4846 return tramp;
4847 }
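
/* A plain-C sketch of the arithmetic emitted above (illustrative
   only; the function generates the equivalent as RTL):

     uintptr_t
     round_up (uintptr_t tramp, uintptr_t align_bytes)
     {
       return (tramp + align_bytes - 1) & -align_bytes;
     }

   where align_bytes stands for TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */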
4848
4849 static rtx
4850 expand_builtin_init_trampoline (tree exp, bool onstack)
4851 {
4852 tree t_tramp, t_func, t_chain;
4853 rtx m_tramp, r_tramp, r_chain, tmp;
4854
4855 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4856 POINTER_TYPE, VOID_TYPE))
4857 return NULL_RTX;
4858
4859 t_tramp = CALL_EXPR_ARG (exp, 0);
4860 t_func = CALL_EXPR_ARG (exp, 1);
4861 t_chain = CALL_EXPR_ARG (exp, 2);
4862
4863 r_tramp = expand_normal (t_tramp);
4864 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4865 MEM_NOTRAP_P (m_tramp) = 1;
4866
4867 /* If ONSTACK, the TRAMP argument should be the address of a field
4868 within the local function's FRAME decl. Either way, let's see if
4869 we can fill in the MEM_ATTRs for this memory. */
4870 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4871 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4872
4873 /* Creator of a heap trampoline is responsible for making sure the
4874 address is aligned to at least STACK_BOUNDARY. Normally malloc
4875 will ensure this anyhow. */
4876 tmp = round_trampoline_addr (r_tramp);
4877 if (tmp != r_tramp)
4878 {
4879 m_tramp = change_address (m_tramp, BLKmode, tmp);
4880 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4881 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4882 }
4883
4884 /* The FUNC argument should be the address of the nested function.
4885 Extract the actual function decl to pass to the hook. */
4886 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4887 t_func = TREE_OPERAND (t_func, 0);
4888 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4889
4890 r_chain = expand_normal (t_chain);
4891
4892 /* Generate insns to initialize the trampoline. */
4893 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4894
4895 if (onstack)
4896 {
4897 trampolines_created = 1;
4898
4899 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4900 "trampoline generated for nested function %qD", t_func);
4901 }
4902
4903 return const0_rtx;
4904 }
4905
4906 static rtx
4907 expand_builtin_adjust_trampoline (tree exp)
4908 {
4909 rtx tramp;
4910
4911 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4912 return NULL_RTX;
4913
4914 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4915 tramp = round_trampoline_addr (tramp);
4916 if (targetm.calls.trampoline_adjust_address)
4917 tramp = targetm.calls.trampoline_adjust_address (tramp);
4918
4919 return tramp;
4920 }
4921
4922 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4923 function. The function first checks whether the back end provides
4924 an insn to implement signbit for the respective mode. If not, it
4925 checks whether the floating point format of the value is such that
4926 the sign bit can be extracted. If that is not the case, error out.
4927 EXP is the expression that is a call to the builtin function; if
4928 convenient, the result should be placed in TARGET. */
4929 static rtx
4930 expand_builtin_signbit (tree exp, rtx target)
4931 {
4932 const struct real_format *fmt;
4933 machine_mode fmode, imode, rmode;
4934 tree arg;
4935 int word, bitpos;
4936 enum insn_code icode;
4937 rtx temp;
4938 location_t loc = EXPR_LOCATION (exp);
4939
4940 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4941 return NULL_RTX;
4942
4943 arg = CALL_EXPR_ARG (exp, 0);
4944 fmode = TYPE_MODE (TREE_TYPE (arg));
4945 rmode = TYPE_MODE (TREE_TYPE (exp));
4946 fmt = REAL_MODE_FORMAT (fmode);
4947
4948 arg = builtin_save_expr (arg);
4949
4950 /* Expand the argument yielding a RTX expression. */
4951 temp = expand_normal (arg);
4952
4953 /* Check if the back end provides an insn that handles signbit for the
4954 argument's mode. */
4955 icode = optab_handler (signbit_optab, fmode);
4956 if (icode != CODE_FOR_nothing)
4957 {
4958 rtx_insn *last = get_last_insn ();
4959 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4960 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4961 return target;
4962 delete_insns_since (last);
4963 }
4964
4965 /* For floating point formats without a sign bit, implement signbit
4966 as "ARG < 0.0". */
4967 bitpos = fmt->signbit_ro;
4968 if (bitpos < 0)
4969 {
4970 /* But we can't do this if the format supports signed zero. */
4971 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4972
4973 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4974 build_real (TREE_TYPE (arg), dconst0));
4975 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4976 }
4977
4978 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4979 {
4980 imode = int_mode_for_mode (fmode);
4981 gcc_assert (imode != BLKmode);
4982 temp = gen_lowpart (imode, temp);
4983 }
4984 else
4985 {
4986 imode = word_mode;
4987 /* Handle targets with different FP word orders. */
4988 if (FLOAT_WORDS_BIG_ENDIAN)
4989 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4990 else
4991 word = bitpos / BITS_PER_WORD;
4992 temp = operand_subword_force (temp, word, fmode);
4993 bitpos = bitpos % BITS_PER_WORD;
4994 }
4995
4996 /* Force the intermediate word_mode (or narrower) result into a
4997 register. This avoids attempting to create paradoxical SUBREGs
4998 of floating point modes below. */
4999 temp = force_reg (imode, temp);
5000
5001 /* If the bitpos is within the "result mode" lowpart, the operation
5002      can be implemented with a single bitwise AND.  Otherwise, we need
5003 a right shift and an AND. */
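  /* E.g. (illustrative, assuming a 64-bit word): for IEEE single with
     rmode == SImode, bitpos == 31 fits in the lowpart, and the result
     is TEMP & 0x80000000; for IEEE double, bitpos == 63 does not fit,
     so TEMP is shifted right by 63 and then masked with 1.  */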
5004
5005 if (bitpos < GET_MODE_BITSIZE (rmode))
5006 {
5007 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5008
5009 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5010 temp = gen_lowpart (rmode, temp);
5011 temp = expand_binop (rmode, and_optab, temp,
5012 immed_wide_int_const (mask, rmode),
5013 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5014 }
5015 else
5016 {
5017 /* Perform a logical right shift to place the signbit in the least
5018 significant bit, then truncate the result to the desired mode
5019 and mask just this bit. */
5020 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5021 temp = gen_lowpart (rmode, temp);
5022 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5023 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5024 }
5025
5026 return temp;
5027 }
5028
5029 /* Expand fork or exec calls. TARGET is the desired target of the
5030 call. EXP is the call. FN is the
5031    identifier of the actual function.  IGNORE is nonzero if the
5032 value is to be ignored. */
5033
5034 static rtx
5035 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5036 {
5037 tree id, decl;
5038 tree call;
5039
5040 /* If we are not profiling, just call the function. */
5041 if (!profile_arc_flag)
5042 return NULL_RTX;
5043
5044 /* Otherwise call the wrapper. This should be equivalent for the rest of
5045      the compiler, so the generated code does not diverge, and the wrapper may
5046      run the code necessary for keeping the profiling data sane. */
5047
5048 switch (DECL_FUNCTION_CODE (fn))
5049 {
5050 case BUILT_IN_FORK:
5051 id = get_identifier ("__gcov_fork");
5052 break;
5053
5054 case BUILT_IN_EXECL:
5055 id = get_identifier ("__gcov_execl");
5056 break;
5057
5058 case BUILT_IN_EXECV:
5059 id = get_identifier ("__gcov_execv");
5060 break;
5061
5062 case BUILT_IN_EXECLP:
5063 id = get_identifier ("__gcov_execlp");
5064 break;
5065
5066 case BUILT_IN_EXECLE:
5067 id = get_identifier ("__gcov_execle");
5068 break;
5069
5070 case BUILT_IN_EXECVP:
5071 id = get_identifier ("__gcov_execvp");
5072 break;
5073
5074 case BUILT_IN_EXECVE:
5075 id = get_identifier ("__gcov_execve");
5076 break;
5077
5078 default:
5079 gcc_unreachable ();
5080 }
5081
5082 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5083 FUNCTION_DECL, id, TREE_TYPE (fn));
5084 DECL_EXTERNAL (decl) = 1;
5085 TREE_PUBLIC (decl) = 1;
5086 DECL_ARTIFICIAL (decl) = 1;
5087 TREE_NOTHROW (decl) = 1;
5088 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5089 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5090 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5091 return expand_call (call, target, ignore);
5092 }
5093
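/* Editorial sketch (an assumption, not the libgcov source): the __gcov_*
   wrappers exist so that profile counters survive fork/exec.  A minimal
   model of what __gcov_fork is expected to do, assuming the counter
   flushing hook __gcov_flush provided by libgcov of this era:  */

#include <sys/types.h>
#include <unistd.h>

extern void __gcov_flush (void);   /* libgcov flushing hook (assumed) */

static pid_t
sketch_gcov_fork (void)
{
  __gcov_flush ();   /* write out counters so parent and child do not double count */
  return fork ();
}
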
5094
5095 \f
5096 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5097 the pointer in these functions is void*, the tree optimizers may remove
5098 casts. The mode computed in expand_builtin isn't reliable either, due
5099 to __sync_bool_compare_and_swap.
5100
5101 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5102 group of builtins. This gives us log2 of the mode size. */
5103
5104 static inline machine_mode
5105 get_builtin_sync_mode (int fcode_diff)
5106 {
5107 /* The size is not negotiable, so ask not to get BLKmode in return
5108 if the target indicates that a smaller size would be better. */
5109 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5110 }
5111
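/* Editorial sketch: FCODE_DIFF is log2 of the access size in bytes, so the
   _1/_2/_4/_8/_16 builtin variants map to 8/16/32/64/128-bit integer modes
   on a byte-addressed target (BITS_PER_UNIT == 8 assumed below).  */

static unsigned int
sketch_sync_bitsize (int fcode_diff)
{
  return 8u << fcode_diff;   /* BITS_PER_UNIT << fcode_diff */
}
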
5112 /* Expand the memory expression LOC and return the appropriate memory operand
5113 for the builtin_sync operations. */
5114
5115 static rtx
5116 get_builtin_sync_mem (tree loc, machine_mode mode)
5117 {
5118 rtx addr, mem;
5119
5120 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5121 addr = convert_memory_address (Pmode, addr);
5122
5123 /* Note that we explicitly do not want any alias information for this
5124 memory, so that we kill all other live memories. Otherwise we don't
5125 satisfy the full barrier semantics of the intrinsic. */
5126 mem = validize_mem (gen_rtx_MEM (mode, addr));
5127
5128 	  /* The alignment needs to be at least that of the mode. */
5129 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5130 get_pointer_alignment (loc)));
5131 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5132 MEM_VOLATILE_P (mem) = 1;
5133
5134 return mem;
5135 }
5136
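/* Editorial sketch: the MEM built above gets the larger of the mode's
   natural alignment and the alignment the pointer is known to carry.  */

static unsigned int
sketch_sync_mem_align (unsigned int mode_align, unsigned int ptr_align)
{
  return mode_align > ptr_align ? mode_align : ptr_align;   /* the MAX above */
}
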
5137 /* Make sure an argument is in the right mode.
5138 EXP is the tree argument.
5139 MODE is the mode it should be in. */
5140
5141 static rtx
5142 expand_expr_force_mode (tree exp, machine_mode mode)
5143 {
5144 rtx val;
5145 machine_mode old_mode;
5146
5147 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5148 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5149 of CONST_INTs, where we know the old_mode only from the call argument. */
5150
5151 old_mode = GET_MODE (val);
5152 if (old_mode == VOIDmode)
5153 old_mode = TYPE_MODE (TREE_TYPE (exp));
5154 val = convert_modes (mode, old_mode, val, 1);
5155 return val;
5156 }
5157
5158
5159 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5160 EXP is the CALL_EXPR. CODE is the rtx code
5161 that corresponds to the arithmetic or logical operation from the name;
5162 an exception here is that NOT actually means NAND. TARGET is an optional
5163 place for us to store the results; AFTER is true if this is the
5164 fetch_and_xxx form. */
5165
5166 static rtx
5167 expand_builtin_sync_operation (machine_mode mode, tree exp,
5168 enum rtx_code code, bool after,
5169 rtx target)
5170 {
5171 rtx val, mem;
5172 location_t loc = EXPR_LOCATION (exp);
5173
5174 if (code == NOT && warn_sync_nand)
5175 {
5176 tree fndecl = get_callee_fndecl (exp);
5177 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5178
5179 static bool warned_f_a_n, warned_n_a_f;
5180
5181 switch (fcode)
5182 {
5183 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5184 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5185 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5186 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5187 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5188 if (warned_f_a_n)
5189 break;
5190
5191 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5192 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5193 warned_f_a_n = true;
5194 break;
5195
5196 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5197 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5198 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5199 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5200 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5201 if (warned_n_a_f)
5202 break;
5203
5204 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5205 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5206 warned_n_a_f = true;
5207 break;
5208
5209 default:
5210 gcc_unreachable ();
5211 }
5212 }
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5217
5218 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5219 after);
5220 }
5221
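/* Editorial sketch: ignoring atomicity, the fetch_and_OP / OP_and_fetch
   pair behaves as below for the NAND case, where the stored value is
   ~(old & val); AFTER selects which of the two values is returned.  */

static int
sketch_sync_nand (int *mem, int val, int after)
{
  int old_val = *mem;               /* fetched atomically in the real expansion */
  int new_val = ~(old_val & val);   /* post-GCC-4.4 NAND semantics */
  *mem = new_val;
  return after ? new_val : old_val; /* nand_and_fetch vs. fetch_and_nand */
}
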
5222 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5223 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5224 true if this is the boolean form. TARGET is a place for us to store the
5225 results; this is NOT optional if IS_BOOL is true. */
5226
5227 static rtx
5228 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5229 bool is_bool, rtx target)
5230 {
5231 rtx old_val, new_val, mem;
5232 rtx *pbool, *poval;
5233
5234 /* Expand the operands. */
5235 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5236 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5237 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5238
5239 pbool = poval = NULL;
5240 if (target != const0_rtx)
5241 {
5242 if (is_bool)
5243 pbool = &target;
5244 else
5245 poval = &target;
5246 }
5247 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5248 false, MEMMODEL_SYNC_SEQ_CST,
5249 MEMMODEL_SYNC_SEQ_CST))
5250 return NULL_RTX;
5251
5252 return target;
5253 }
5254
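/* Editorial sketch: both forms share one compare-and-swap; they differ only
   in whether the caller keeps the success flag or the observed old value.  */

static int
sketch_val_compare_and_swap (int *mem, int old_val, int new_val)
{
  int observed = *mem;   /* one atomic step in the real expansion */
  if (observed == old_val)
    *mem = new_val;
  return observed;       /* the bool form returns observed == old_val instead */
}
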
5255 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5256 general form is actually an atomic exchange, and some targets only
5257 support a reduced form with the second argument being a constant 1.
5258 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5259 the results. */
5260
5261 static rtx
5262 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5263 rtx target)
5264 {
5265 rtx val, mem;
5266
5267 /* Expand the operands. */
5268 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5269 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5270
5271 return expand_sync_lock_test_and_set (target, mem, val);
5272 }
5273
5274 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5275
5276 static void
5277 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5278 {
5279 rtx mem;
5280
5281 /* Expand the operands. */
5282 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5283
5284 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5285 }
5286
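/* Editorial sketch of the reduced lock forms, ignoring atomicity and
   barriers: test_and_set is an exchange (here with the constant 1 that
   limited targets support) and release is a store of zero.  */

static int
sketch_lock_test_and_set (int *lock)
{
  int old_val = *lock;   /* returned so the caller can tell who got the lock */
  *lock = 1;
  return old_val;
}

static void
sketch_lock_release (int *lock)
{
  *lock = 0;             /* release-ordered store of zero */
}
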
5287 /* Given an integer representing an ``enum memmodel'', verify its
5288 correctness and return the memory model enum. */
5289
5290 static enum memmodel
5291 get_memmodel (tree exp)
5292 {
5293 rtx op;
5294 unsigned HOST_WIDE_INT val;
5295
5296 /* If the parameter is not a constant, it's a run time value so we'll just
5297 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5298 if (TREE_CODE (exp) != INTEGER_CST)
5299 return MEMMODEL_SEQ_CST;
5300
5301 op = expand_normal (exp);
5302
5303 val = INTVAL (op);
5304 if (targetm.memmodel_check)
5305 val = targetm.memmodel_check (val);
5306 else if (val & ~MEMMODEL_MASK)
5307 {
5308 warning (OPT_Winvalid_memory_model,
5309 "Unknown architecture specifier in memory model to builtin.");
5310 return MEMMODEL_SEQ_CST;
5311 }
5312
5313   /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5314 if (memmodel_base (val) >= MEMMODEL_LAST)
5315 {
5316 warning (OPT_Winvalid_memory_model,
5317 "invalid memory model argument to builtin");
5318 return MEMMODEL_SEQ_CST;
5319 }
5320
5321 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5322 be conservative and promote consume to acquire. */
5323 if (val == MEMMODEL_CONSUME)
5324 val = MEMMODEL_ACQUIRE;
5325
5326 return (enum memmodel) val;
5327 }
5328
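/* Editorial sketch of the checks above.  The C++11-style numbering of the
   base models and the 0x7fff value of MEMMODEL_MASK are assumptions made
   for illustration.  */

static unsigned int
sketch_validate_memmodel (unsigned int val)
{
  enum { RELAXED, CONSUME, ACQUIRE, RELEASE, ACQ_REL, SEQ_CST, LAST };
  const unsigned int mask = 0x7fff;   /* assumed MEMMODEL_MASK */

  if (val & ~mask)
    return SEQ_CST;     /* unknown architecture-specific high bits */
  if ((val & mask) >= LAST)
    return SEQ_CST;     /* base model out of range */
  if (val == CONSUME)
    return ACQUIRE;     /* PR 59448: promote consume to acquire */
  return val;
}
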
5329 /* Expand the __atomic_exchange intrinsic:
5330 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5331 EXP is the CALL_EXPR.
5332 TARGET is an optional place for us to store the results. */
5333
5334 static rtx
5335 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5336 {
5337 rtx val, mem;
5338 enum memmodel model;
5339
5340 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5341
5342 if (!flag_inline_atomics)
5343 return NULL_RTX;
5344
5345 /* Expand the operands. */
5346 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5347 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5348
5349 return expand_atomic_exchange (target, mem, val, model);
5350 }
5351
5352 /* Expand the __atomic_compare_exchange intrinsic:
5353 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5354 TYPE desired, BOOL weak,
5355 enum memmodel success,
5356 enum memmodel failure)
5357 EXP is the CALL_EXPR.
5358 TARGET is an optional place for us to store the results. */
5359
5360 static rtx
5361 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5362 rtx target)
5363 {
5364 rtx expect, desired, mem, oldval;
5365 rtx_code_label *label;
5366 enum memmodel success, failure;
5367 tree weak;
5368 bool is_weak;
5369
5370 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5371 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5372
5373 if (failure > success)
5374 {
5375 warning (OPT_Winvalid_memory_model,
5376 "failure memory model cannot be stronger than success memory "
5377 "model for %<__atomic_compare_exchange%>");
5378 success = MEMMODEL_SEQ_CST;
5379 }
5380
5381 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5382 {
5383 warning (OPT_Winvalid_memory_model,
5384 "invalid failure memory model for "
5385 "%<__atomic_compare_exchange%>");
5386 failure = MEMMODEL_SEQ_CST;
5387 success = MEMMODEL_SEQ_CST;
5388 }
5389
5390
5391 if (!flag_inline_atomics)
5392 return NULL_RTX;
5393
5394 /* Expand the operands. */
5395 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5396
5397 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5398 expect = convert_memory_address (Pmode, expect);
5399 expect = gen_rtx_MEM (mode, expect);
5400 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5401
5402 weak = CALL_EXPR_ARG (exp, 3);
5403 is_weak = false;
5404 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5405 is_weak = true;
5406
5407 if (target == const0_rtx)
5408 target = NULL;
5409
5410   /* Lest the rtl backend create a race condition with an improper store
5411 to memory, always create a new pseudo for OLDVAL. */
5412 oldval = NULL;
5413
5414 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5415 is_weak, success, failure))
5416 return NULL_RTX;
5417
5418 /* Conditionally store back to EXPECT, lest we create a race condition
5419 with an improper store to memory. */
5420 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5421 the normal case where EXPECT is totally private, i.e. a register. At
5422 which point the store can be unconditional. */
5423 label = gen_label_rtx ();
5424 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5425 GET_MODE (target), 1, label);
5426 emit_move_insn (expect, oldval);
5427 emit_label (label);
5428
5429 return target;
5430 }
5431
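/* Editorial sketch: the RTL emitted above implements the usual
   __atomic_compare_exchange contract, including the store-back of the
   observed value into *EXPECT that happens only on failure (the branch
   above jumps over the move on success).  */

static int
sketch_compare_exchange (int *object, int *expect, int desired)
{
  int observed = *object;   /* one atomic CAS in the real expansion */
  if (observed == *expect)
    {
      *object = desired;
      return 1;
    }
  *expect = observed;       /* the conditional store-back */
  return 0;
}
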
5432 /* Expand the __atomic_load intrinsic:
5433 TYPE __atomic_load (TYPE *object, enum memmodel)
5434 EXP is the CALL_EXPR.
5435 TARGET is an optional place for us to store the results. */
5436
5437 static rtx
5438 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5439 {
5440 rtx mem;
5441 enum memmodel model;
5442
5443 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5444 if (is_mm_release (model) || is_mm_acq_rel (model))
5445 {
5446 warning (OPT_Winvalid_memory_model,
5447 "invalid memory model for %<__atomic_load%>");
5448 model = MEMMODEL_SEQ_CST;
5449 }
5450
5451 if (!flag_inline_atomics)
5452 return NULL_RTX;
5453
5454 /* Expand the operand. */
5455 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5456
5457 return expand_atomic_load (target, mem, model);
5458 }
5459
5460
5461 /* Expand the __atomic_store intrinsic:
5462 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5463 EXP is the CALL_EXPR.
5464 TARGET is an optional place for us to store the results. */
5465
5466 static rtx
5467 expand_builtin_atomic_store (machine_mode mode, tree exp)
5468 {
5469 rtx mem, val;
5470 enum memmodel model;
5471
5472 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5473 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5474 || is_mm_release (model)))
5475 {
5476 warning (OPT_Winvalid_memory_model,
5477 "invalid memory model for %<__atomic_store%>");
5478 model = MEMMODEL_SEQ_CST;
5479 }
5480
5481 if (!flag_inline_atomics)
5482 return NULL_RTX;
5483
5484 /* Expand the operands. */
5485 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5486 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5487
5488 return expand_atomic_store (mem, val, model, false);
5489 }
5490
5491 /* Expand the __atomic_fetch_XXX intrinsic:
5492 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5493 EXP is the CALL_EXPR.
5494 TARGET is an optional place for us to store the results.
5495    CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; NOT encodes NAND.
5496 FETCH_AFTER is true if returning the result of the operation.
5497 FETCH_AFTER is false if returning the value before the operation.
5498 IGNORE is true if the result is not used.
5499 EXT_CALL is the correct builtin for an external call if this cannot be
5500 resolved to an instruction sequence. */
5501
5502 static rtx
5503 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5504 enum rtx_code code, bool fetch_after,
5505 bool ignore, enum built_in_function ext_call)
5506 {
5507 rtx val, mem, ret;
5508 enum memmodel model;
5509 tree fndecl;
5510 tree addr;
5511
5512 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5513
5514 /* Expand the operands. */
5515 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5516 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5517
5518 /* Only try generating instructions if inlining is turned on. */
5519 if (flag_inline_atomics)
5520 {
5521 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5522 if (ret)
5523 return ret;
5524 }
5525
5526   /* Return if there is no alternate routine for the library call. */
5527 if (ext_call == BUILT_IN_NONE)
5528 return NULL_RTX;
5529
5530 /* Change the call to the specified function. */
5531 fndecl = get_callee_fndecl (exp);
5532 addr = CALL_EXPR_FN (exp);
5533 STRIP_NOPS (addr);
5534
5535 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5536 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5537
5538 /* Expand the call here so we can emit trailing code. */
5539 ret = expand_call (exp, target, ignore);
5540
5541 /* Replace the original function just in case it matters. */
5542 TREE_OPERAND (addr, 0) = fndecl;
5543
5544 /* Then issue the arithmetic correction to return the right result. */
5545 if (!ignore)
5546 {
5547 if (code == NOT)
5548 {
5549 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5550 OPTAB_LIB_WIDEN);
5551 ret = expand_simple_unop (mode, NOT, ret, target, true);
5552 }
5553 else
5554 ret = expand_simple_binop (mode, code, ret, val, target, true,
5555 OPTAB_LIB_WIDEN);
5556 }
5557 return ret;
5558 }
5559
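/* Editorial sketch: when only the fetch-before library routine is
   available, the OP_and_fetch result is rebuilt from the returned old
   value; for NAND that is the AND-then-NOT correction emitted above.  */

static int
sketch_nand_fetch_fixup (int fetched, int val)
{
  return ~(fetched & val);   /* expand_simple_binop (AND), then expand_simple_unop (NOT) */
}
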
5560 /* Expand an atomic clear operation.
5561 	void __atomic_clear (BOOL *obj, enum memmodel)
5562 EXP is the call expression. */
5563
5564 static rtx
5565 expand_builtin_atomic_clear (tree exp)
5566 {
5567 machine_mode mode;
5568 rtx mem, ret;
5569 enum memmodel model;
5570
5571 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5572 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5573 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5574
5575 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5576 {
5577 warning (OPT_Winvalid_memory_model,
5578 "invalid memory model for %<__atomic_store%>");
5579 model = MEMMODEL_SEQ_CST;
5580 }
5581
5582   /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release.
5583      Failing that, expand_atomic_store emits the store itself. The only way this can
5584 fail is if the bool type is larger than a word size. Unlikely, but
5585 handle it anyway for completeness. Assume a single threaded model since
5586 there is no atomic support in this case, and no barriers are required. */
5587 ret = expand_atomic_store (mem, const0_rtx, model, true);
5588 if (!ret)
5589 emit_move_insn (mem, const0_rtx);
5590 return const0_rtx;
5591 }
5592
5593 /* Expand an atomic test_and_set operation.
5594 	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5595 EXP is the call expression. */
5596
5597 static rtx
5598 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5599 {
5600 rtx mem;
5601 enum memmodel model;
5602 machine_mode mode;
5603
5604 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5605 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5606 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5607
5608 return expand_atomic_test_and_set (target, mem, model);
5609 }
5610
5611
5612 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5613 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5614
5615 static tree
5616 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5617 {
5618 int size;
5619 machine_mode mode;
5620 unsigned int mode_align, type_align;
5621
5622 if (TREE_CODE (arg0) != INTEGER_CST)
5623 return NULL_TREE;
5624
5625 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5626 mode = mode_for_size (size, MODE_INT, 0);
5627 mode_align = GET_MODE_ALIGNMENT (mode);
5628
5629 if (TREE_CODE (arg1) == INTEGER_CST)
5630 {
5631 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5632
5633 /* Either this argument is null, or it's a fake pointer encoding
5634 the alignment of the object. */
5635 val = val & -val;
5636 val *= BITS_PER_UNIT;
5637
5638 if (val == 0 || mode_align < val)
5639 type_align = mode_align;
5640 else
5641 type_align = val;
5642 }
5643 else
5644 {
5645 tree ttype = TREE_TYPE (arg1);
5646
5647 /* This function is usually invoked and folded immediately by the front
5648 end before anything else has a chance to look at it. The pointer
5649 parameter at this point is usually cast to a void *, so check for that
5650 and look past the cast. */
5651 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5652 && VOID_TYPE_P (TREE_TYPE (ttype)))
5653 arg1 = TREE_OPERAND (arg1, 0);
5654
5655 ttype = TREE_TYPE (arg1);
5656 gcc_assert (POINTER_TYPE_P (ttype));
5657
5658 /* Get the underlying type of the object. */
5659 ttype = TREE_TYPE (ttype);
5660 type_align = TYPE_ALIGN (ttype);
5661 }
5662
5663 /* If the object has smaller alignment, the lock free routines cannot
5664 be used. */
5665 if (type_align < mode_align)
5666 return boolean_false_node;
5667
5668 /* Check if a compare_and_swap pattern exists for the mode which represents
5669 the required size. The pattern is not allowed to fail, so the existence
5670 of the pattern indicates support is present. */
5671 if (can_compare_and_swap_p (mode, true))
5672 return boolean_true_node;
5673 else
5674 return boolean_false_node;
5675 }
5676
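/* Editorial sketch: a constant "fake pointer" argument encodes the object's
   alignment in its low bits; val & -val isolates the lowest set bit, and
   scaling by BITS_PER_UNIT (assumed 8 here) converts bytes to bits.  */

static unsigned long
sketch_align_bits_from_ptr (unsigned long val)
{
  return (val & -val) * 8;   /* 0 when val is 0: caller then uses the mode alignment */
}
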
5677 /* Return true if the parameters to call EXP represent an object which will
5678 always generate lock free instructions. The first argument represents the
5679 size of the object, and the second parameter is a pointer to the object
5680 itself. If NULL is passed for the object, then the result is based on
5681 typical alignment for an object of the specified size. Otherwise return
5682 false. */
5683
5684 static rtx
5685 expand_builtin_atomic_always_lock_free (tree exp)
5686 {
5687 tree size;
5688 tree arg0 = CALL_EXPR_ARG (exp, 0);
5689 tree arg1 = CALL_EXPR_ARG (exp, 1);
5690
5691 if (TREE_CODE (arg0) != INTEGER_CST)
5692 {
5693 error ("non-constant argument 1 to __atomic_always_lock_free");
5694 return const0_rtx;
5695 }
5696
5697 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5698 if (size == boolean_true_node)
5699 return const1_rtx;
5700 return const0_rtx;
5701 }
5702
5703 /* Return boolean_true_node if it can be determined that object ARG1 of size
5704    ARG0 is lock free on this architecture, NULL_TREE otherwise. */
5705
5706 static tree
5707 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5708 {
5709 if (!flag_inline_atomics)
5710 return NULL_TREE;
5711
5712 /* If it isn't always lock free, don't generate a result. */
5713 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5714 return boolean_true_node;
5715
5716 return NULL_TREE;
5717 }
5718
5719 /* Return true if the parameters to call EXP represent an object which will
5720 always generate lock free instructions. The first argument represents the
5721 size of the object, and the second parameter is a pointer to the object
5722 itself. If NULL is passed for the object, then the result is based on
5723 typical alignment for an object of the specified size. Otherwise return
5724    NULL.  */
5725
5726 static rtx
5727 expand_builtin_atomic_is_lock_free (tree exp)
5728 {
5729 tree size;
5730 tree arg0 = CALL_EXPR_ARG (exp, 0);
5731 tree arg1 = CALL_EXPR_ARG (exp, 1);
5732
5733 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5734 {
5735 error ("non-integer argument 1 to __atomic_is_lock_free");
5736 return NULL_RTX;
5737 }
5738
5739 if (!flag_inline_atomics)
5740 return NULL_RTX;
5741
5742 /* If the value is known at compile time, return the RTX for it. */
5743 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5744 if (size == boolean_true_node)
5745 return const1_rtx;
5746
5747 return NULL_RTX;
5748 }
5749
5750 /* Expand the __atomic_thread_fence intrinsic:
5751 void __atomic_thread_fence (enum memmodel)
5752 EXP is the CALL_EXPR. */
5753
5754 static void
5755 expand_builtin_atomic_thread_fence (tree exp)
5756 {
5757 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5758 expand_mem_thread_fence (model);
5759 }
5760
5761 /* Expand the __atomic_signal_fence intrinsic:
5762 void __atomic_signal_fence (enum memmodel)
5763 EXP is the CALL_EXPR. */
5764
5765 static void
5766 expand_builtin_atomic_signal_fence (tree exp)
5767 {
5768 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5769 expand_mem_signal_fence (model);
5770 }
5771
5772 /* Expand the __sync_synchronize intrinsic. */
5773
5774 static void
5775 expand_builtin_sync_synchronize (void)
5776 {
5777 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5778 }
5779
5780 static rtx
5781 expand_builtin_thread_pointer (tree exp, rtx target)
5782 {
5783 enum insn_code icode;
5784 if (!validate_arglist (exp, VOID_TYPE))
5785 return const0_rtx;
5786 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5787 if (icode != CODE_FOR_nothing)
5788 {
5789 struct expand_operand op;
5790       /* If the target is not suitable then create a new target. */
5791 if (target == NULL_RTX
5792 || !REG_P (target)
5793 || GET_MODE (target) != Pmode)
5794 target = gen_reg_rtx (Pmode);
5795 create_output_operand (&op, target, Pmode);
5796 expand_insn (icode, 1, &op);
5797 return target;
5798 }
5799 error ("__builtin_thread_pointer is not supported on this target");
5800 return const0_rtx;
5801 }
5802
5803 static void
5804 expand_builtin_set_thread_pointer (tree exp)
5805 {
5806 enum insn_code icode;
5807 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5808 return;
5809 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5810 if (icode != CODE_FOR_nothing)
5811 {
5812 struct expand_operand op;
5813 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5814 Pmode, EXPAND_NORMAL);
5815 create_input_operand (&op, val, Pmode);
5816 expand_insn (icode, 1, &op);
5817 return;
5818 }
5819 error ("__builtin_set_thread_pointer is not supported on this target");
5820 }
5821
5822 \f
5823 /* Emit code to restore the current value of the stack pointer. */
5824
5825 static void
5826 expand_stack_restore (tree var)
5827 {
5828 rtx_insn *prev;
5829 rtx sa = expand_normal (var);
5830
5831 sa = convert_memory_address (Pmode, sa);
5832
5833 prev = get_last_insn ();
5834 emit_stack_restore (SAVE_BLOCK, sa);
5835
5836 record_new_stack_level ();
5837
5838 fixup_args_size_notes (prev, get_last_insn (), 0);
5839 }
5840
5841 /* Emit code to save the current value of the stack pointer. */
5842
5843 static rtx
5844 expand_stack_save (void)
5845 {
5846 rtx ret = NULL_RTX;
5847
5848 emit_stack_save (SAVE_BLOCK, &ret);
5849 return ret;
5850 }
5851
5852
5853 /* Expand an expression EXP that calls a built-in function,
5854 with result going to TARGET if that's convenient
5855 (and in mode MODE if that's convenient).
5856 SUBTARGET may be used as the target for computing one of EXP's operands.
5857 IGNORE is nonzero if the value is to be ignored. */
5858
5859 rtx
5860 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5861 int ignore)
5862 {
5863 tree fndecl = get_callee_fndecl (exp);
5864 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5865 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5866 int flags;
5867
5868 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5869 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5870
5871 /* When ASan is enabled, we don't want to expand some memory/string
5872 builtins and rely on libsanitizer's hooks. This allows us to avoid
5873      redundant checks and be sure that a possible overflow will be detected
5874 by ASan. */
5875
5876 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5877 return expand_call (exp, target, ignore);
5878
5879 /* When not optimizing, generate calls to library functions for a certain
5880 set of builtins. */
5881 if (!optimize
5882 && !called_as_built_in (fndecl)
5883 && fcode != BUILT_IN_FORK
5884 && fcode != BUILT_IN_EXECL
5885 && fcode != BUILT_IN_EXECV
5886 && fcode != BUILT_IN_EXECLP
5887 && fcode != BUILT_IN_EXECLE
5888 && fcode != BUILT_IN_EXECVP
5889 && fcode != BUILT_IN_EXECVE
5890 && fcode != BUILT_IN_ALLOCA
5891 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5892 && fcode != BUILT_IN_FREE
5893 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5894 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5895 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5896 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5897 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5898 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5899 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5900 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5901 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5902 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5903 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5904 && fcode != BUILT_IN_CHKP_BNDRET)
5905 return expand_call (exp, target, ignore);
5906
5907 /* The built-in function expanders test for target == const0_rtx
5908 to determine whether the function's result will be ignored. */
5909 if (ignore)
5910 target = const0_rtx;
5911
5912 /* If the result of a pure or const built-in function is ignored, and
5913 none of its arguments are volatile, we can avoid expanding the
5914 built-in call and just evaluate the arguments for side-effects. */
5915 if (target == const0_rtx
5916 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5917 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5918 {
5919 bool volatilep = false;
5920 tree arg;
5921 call_expr_arg_iterator iter;
5922
5923 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5924 if (TREE_THIS_VOLATILE (arg))
5925 {
5926 volatilep = true;
5927 break;
5928 }
5929
5930 if (! volatilep)
5931 {
5932 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5933 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5934 return const0_rtx;
5935 }
5936 }
5937
5938 /* expand_builtin_with_bounds is supposed to be used for
5939 instrumented builtin calls. */
5940 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5941
5942 switch (fcode)
5943 {
5944 CASE_FLT_FN (BUILT_IN_FABS):
5945 case BUILT_IN_FABSD32:
5946 case BUILT_IN_FABSD64:
5947 case BUILT_IN_FABSD128:
5948 target = expand_builtin_fabs (exp, target, subtarget);
5949 if (target)
5950 return target;
5951 break;
5952
5953 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5954 target = expand_builtin_copysign (exp, target, subtarget);
5955 if (target)
5956 return target;
5957 break;
5958
5959 /* Just do a normal library call if we were unable to fold
5960 the values. */
5961 CASE_FLT_FN (BUILT_IN_CABS):
5962 break;
5963
5964 CASE_FLT_FN (BUILT_IN_EXP):
5965 CASE_FLT_FN (BUILT_IN_EXP10):
5966 CASE_FLT_FN (BUILT_IN_POW10):
5967 CASE_FLT_FN (BUILT_IN_EXP2):
5968 CASE_FLT_FN (BUILT_IN_EXPM1):
5969 CASE_FLT_FN (BUILT_IN_LOGB):
5970 CASE_FLT_FN (BUILT_IN_LOG):
5971 CASE_FLT_FN (BUILT_IN_LOG10):
5972 CASE_FLT_FN (BUILT_IN_LOG2):
5973 CASE_FLT_FN (BUILT_IN_LOG1P):
5974 CASE_FLT_FN (BUILT_IN_TAN):
5975 CASE_FLT_FN (BUILT_IN_ASIN):
5976 CASE_FLT_FN (BUILT_IN_ACOS):
5977 CASE_FLT_FN (BUILT_IN_ATAN):
5978 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5979 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5980 because of possible accuracy problems. */
5981 if (! flag_unsafe_math_optimizations)
5982 break;
5983 CASE_FLT_FN (BUILT_IN_SQRT):
5984 CASE_FLT_FN (BUILT_IN_FLOOR):
5985 CASE_FLT_FN (BUILT_IN_CEIL):
5986 CASE_FLT_FN (BUILT_IN_TRUNC):
5987 CASE_FLT_FN (BUILT_IN_ROUND):
5988 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5989 CASE_FLT_FN (BUILT_IN_RINT):
5990 target = expand_builtin_mathfn (exp, target, subtarget);
5991 if (target)
5992 return target;
5993 break;
5994
5995 CASE_FLT_FN (BUILT_IN_FMA):
5996 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5997 if (target)
5998 return target;
5999 break;
6000
6001 CASE_FLT_FN (BUILT_IN_ILOGB):
6002 if (! flag_unsafe_math_optimizations)
6003 break;
6004 CASE_FLT_FN (BUILT_IN_ISINF):
6005 CASE_FLT_FN (BUILT_IN_FINITE):
6006 case BUILT_IN_ISFINITE:
6007 case BUILT_IN_ISNORMAL:
6008 target = expand_builtin_interclass_mathfn (exp, target);
6009 if (target)
6010 return target;
6011 break;
6012
6013 CASE_FLT_FN (BUILT_IN_ICEIL):
6014 CASE_FLT_FN (BUILT_IN_LCEIL):
6015 CASE_FLT_FN (BUILT_IN_LLCEIL):
6016 CASE_FLT_FN (BUILT_IN_LFLOOR):
6017 CASE_FLT_FN (BUILT_IN_IFLOOR):
6018 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6019 target = expand_builtin_int_roundingfn (exp, target);
6020 if (target)
6021 return target;
6022 break;
6023
6024 CASE_FLT_FN (BUILT_IN_IRINT):
6025 CASE_FLT_FN (BUILT_IN_LRINT):
6026 CASE_FLT_FN (BUILT_IN_LLRINT):
6027 CASE_FLT_FN (BUILT_IN_IROUND):
6028 CASE_FLT_FN (BUILT_IN_LROUND):
6029 CASE_FLT_FN (BUILT_IN_LLROUND):
6030 target = expand_builtin_int_roundingfn_2 (exp, target);
6031 if (target)
6032 return target;
6033 break;
6034
6035 CASE_FLT_FN (BUILT_IN_POWI):
6036 target = expand_builtin_powi (exp, target);
6037 if (target)
6038 return target;
6039 break;
6040
6041 CASE_FLT_FN (BUILT_IN_ATAN2):
6042 CASE_FLT_FN (BUILT_IN_LDEXP):
6043 CASE_FLT_FN (BUILT_IN_SCALB):
6044 CASE_FLT_FN (BUILT_IN_SCALBN):
6045 CASE_FLT_FN (BUILT_IN_SCALBLN):
6046 if (! flag_unsafe_math_optimizations)
6047 break;
6048
6049 CASE_FLT_FN (BUILT_IN_FMOD):
6050 CASE_FLT_FN (BUILT_IN_REMAINDER):
6051 CASE_FLT_FN (BUILT_IN_DREM):
6052 CASE_FLT_FN (BUILT_IN_POW):
6053 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6054 if (target)
6055 return target;
6056 break;
6057
6058 CASE_FLT_FN (BUILT_IN_CEXPI):
6059 target = expand_builtin_cexpi (exp, target);
6060 gcc_assert (target);
6061 return target;
6062
6063 CASE_FLT_FN (BUILT_IN_SIN):
6064 CASE_FLT_FN (BUILT_IN_COS):
6065 if (! flag_unsafe_math_optimizations)
6066 break;
6067 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6068 if (target)
6069 return target;
6070 break;
6071
6072 CASE_FLT_FN (BUILT_IN_SINCOS):
6073 if (! flag_unsafe_math_optimizations)
6074 break;
6075 target = expand_builtin_sincos (exp);
6076 if (target)
6077 return target;
6078 break;
6079
6080 case BUILT_IN_APPLY_ARGS:
6081 return expand_builtin_apply_args ();
6082
6083 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6084 FUNCTION with a copy of the parameters described by
6085 ARGUMENTS, and ARGSIZE. It returns a block of memory
6086 allocated on the stack into which is stored all the registers
6087 that might possibly be used for returning the result of a
6088 function. ARGUMENTS is the value returned by
6089 __builtin_apply_args. ARGSIZE is the number of bytes of
6090 arguments that must be copied. ??? How should this value be
6091 computed? We'll also need a safe worst case value for varargs
6092 functions. */
6093 case BUILT_IN_APPLY:
6094 if (!validate_arglist (exp, POINTER_TYPE,
6095 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6096 && !validate_arglist (exp, REFERENCE_TYPE,
6097 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6098 return const0_rtx;
6099 else
6100 {
6101 rtx ops[3];
6102
6103 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6104 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6105 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6106
6107 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6108 }
6109
6110 /* __builtin_return (RESULT) causes the function to return the
6111 value described by RESULT. RESULT is address of the block of
6112 memory returned by __builtin_apply. */
6113 case BUILT_IN_RETURN:
6114 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6115 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6116 return const0_rtx;
6117
6118 case BUILT_IN_SAVEREGS:
6119 return expand_builtin_saveregs ();
6120
6121 case BUILT_IN_VA_ARG_PACK:
6122 /* All valid uses of __builtin_va_arg_pack () are removed during
6123 inlining. */
6124 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6125 return const0_rtx;
6126
6127 case BUILT_IN_VA_ARG_PACK_LEN:
6128 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6129 inlining. */
6130 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6131 return const0_rtx;
6132
6133 /* Return the address of the first anonymous stack arg. */
6134 case BUILT_IN_NEXT_ARG:
6135 if (fold_builtin_next_arg (exp, false))
6136 return const0_rtx;
6137 return expand_builtin_next_arg ();
6138
6139 case BUILT_IN_CLEAR_CACHE:
6140 target = expand_builtin___clear_cache (exp);
6141 if (target)
6142 return target;
6143 break;
6144
6145 case BUILT_IN_CLASSIFY_TYPE:
6146 return expand_builtin_classify_type (exp);
6147
6148 case BUILT_IN_CONSTANT_P:
6149 return const0_rtx;
6150
6151 case BUILT_IN_FRAME_ADDRESS:
6152 case BUILT_IN_RETURN_ADDRESS:
6153 return expand_builtin_frame_address (fndecl, exp);
6154
6155 /* Returns the address of the area where the structure is returned.
6156 0 otherwise. */
6157 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6158 if (call_expr_nargs (exp) != 0
6159 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6160 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6161 return const0_rtx;
6162 else
6163 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6164
6165 case BUILT_IN_ALLOCA:
6166 case BUILT_IN_ALLOCA_WITH_ALIGN:
6167 /* If the allocation stems from the declaration of a variable-sized
6168 object, it cannot accumulate. */
6169 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_STACK_SAVE:
6175 return expand_stack_save ();
6176
6177 case BUILT_IN_STACK_RESTORE:
6178 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6179 return const0_rtx;
6180
6181 case BUILT_IN_BSWAP16:
6182 case BUILT_IN_BSWAP32:
6183 case BUILT_IN_BSWAP64:
6184 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6185 if (target)
6186 return target;
6187 break;
6188
6189 CASE_INT_FN (BUILT_IN_FFS):
6190 target = expand_builtin_unop (target_mode, exp, target,
6191 subtarget, ffs_optab);
6192 if (target)
6193 return target;
6194 break;
6195
6196 CASE_INT_FN (BUILT_IN_CLZ):
6197 target = expand_builtin_unop (target_mode, exp, target,
6198 subtarget, clz_optab);
6199 if (target)
6200 return target;
6201 break;
6202
6203 CASE_INT_FN (BUILT_IN_CTZ):
6204 target = expand_builtin_unop (target_mode, exp, target,
6205 subtarget, ctz_optab);
6206 if (target)
6207 return target;
6208 break;
6209
6210 CASE_INT_FN (BUILT_IN_CLRSB):
6211 target = expand_builtin_unop (target_mode, exp, target,
6212 subtarget, clrsb_optab);
6213 if (target)
6214 return target;
6215 break;
6216
6217 CASE_INT_FN (BUILT_IN_POPCOUNT):
6218 target = expand_builtin_unop (target_mode, exp, target,
6219 subtarget, popcount_optab);
6220 if (target)
6221 return target;
6222 break;
6223
6224 CASE_INT_FN (BUILT_IN_PARITY):
6225 target = expand_builtin_unop (target_mode, exp, target,
6226 subtarget, parity_optab);
6227 if (target)
6228 return target;
6229 break;
6230
6231 case BUILT_IN_STRLEN:
6232 target = expand_builtin_strlen (exp, target, target_mode);
6233 if (target)
6234 return target;
6235 break;
6236
6237 case BUILT_IN_STRCPY:
6238 target = expand_builtin_strcpy (exp, target);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_STRNCPY:
6244 target = expand_builtin_strncpy (exp, target);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_STPCPY:
6250 target = expand_builtin_stpcpy (exp, target, mode);
6251 if (target)
6252 return target;
6253 break;
6254
6255 case BUILT_IN_MEMCPY:
6256 target = expand_builtin_memcpy (exp, target);
6257 if (target)
6258 return target;
6259 break;
6260
6261 case BUILT_IN_MEMPCPY:
6262 target = expand_builtin_mempcpy (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_MEMSET:
6268 target = expand_builtin_memset (exp, target, mode);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_BZERO:
6274 target = expand_builtin_bzero (exp);
6275 if (target)
6276 return target;
6277 break;
6278
6279 case BUILT_IN_STRCMP:
6280 target = expand_builtin_strcmp (exp, target);
6281 if (target)
6282 return target;
6283 break;
6284
6285 case BUILT_IN_STRNCMP:
6286 target = expand_builtin_strncmp (exp, target, mode);
6287 if (target)
6288 return target;
6289 break;
6290
6291 case BUILT_IN_BCMP:
6292 case BUILT_IN_MEMCMP:
6293 target = expand_builtin_memcmp (exp, target);
6294 if (target)
6295 return target;
6296 break;
6297
6298 case BUILT_IN_SETJMP:
6299 /* This should have been lowered to the builtins below. */
6300 gcc_unreachable ();
6301
6302 case BUILT_IN_SETJMP_SETUP:
6303 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6304 and the receiver label. */
6305 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6306 {
6307 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6308 VOIDmode, EXPAND_NORMAL);
6309 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6310 rtx_insn *label_r = label_rtx (label);
6311
6312 /* This is copied from the handling of non-local gotos. */
6313 expand_builtin_setjmp_setup (buf_addr, label_r);
6314 nonlocal_goto_handler_labels
6315 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6316 nonlocal_goto_handler_labels);
6317 /* ??? Do not let expand_label treat us as such since we would
6318 not want to be both on the list of non-local labels and on
6319 the list of forced labels. */
6320 FORCED_LABEL (label) = 0;
6321 return const0_rtx;
6322 }
6323 break;
6324
6325 case BUILT_IN_SETJMP_RECEIVER:
6326 /* __builtin_setjmp_receiver is passed the receiver label. */
6327 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6328 {
6329 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6330 rtx_insn *label_r = label_rtx (label);
6331
6332 expand_builtin_setjmp_receiver (label_r);
6333 return const0_rtx;
6334 }
6335 break;
6336
6337 /* __builtin_longjmp is passed a pointer to an array of five words.
6338 It's similar to the C library longjmp function but works with
6339 __builtin_setjmp above. */
6340 case BUILT_IN_LONGJMP:
6341 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6342 {
6343 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6344 VOIDmode, EXPAND_NORMAL);
6345 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6346
6347 if (value != const1_rtx)
6348 {
6349 error ("%<__builtin_longjmp%> second argument must be 1");
6350 return const0_rtx;
6351 }
6352
6353 expand_builtin_longjmp (buf_addr, value);
6354 return const0_rtx;
6355 }
6356 break;
6357
6358 case BUILT_IN_NONLOCAL_GOTO:
6359 target = expand_builtin_nonlocal_goto (exp);
6360 if (target)
6361 return target;
6362 break;
6363
6364 /* This updates the setjmp buffer that is its argument with the value
6365 of the current stack pointer. */
6366 case BUILT_IN_UPDATE_SETJMP_BUF:
6367 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6368 {
6369 rtx buf_addr
6370 = expand_normal (CALL_EXPR_ARG (exp, 0));
6371
6372 expand_builtin_update_setjmp_buf (buf_addr);
6373 return const0_rtx;
6374 }
6375 break;
6376
6377 case BUILT_IN_TRAP:
6378 expand_builtin_trap ();
6379 return const0_rtx;
6380
6381 case BUILT_IN_UNREACHABLE:
6382 expand_builtin_unreachable ();
6383 return const0_rtx;
6384
6385 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6386 case BUILT_IN_SIGNBITD32:
6387 case BUILT_IN_SIGNBITD64:
6388 case BUILT_IN_SIGNBITD128:
6389 target = expand_builtin_signbit (exp, target);
6390 if (target)
6391 return target;
6392 break;
6393
6394 /* Various hooks for the DWARF 2 __throw routine. */
6395 case BUILT_IN_UNWIND_INIT:
6396 expand_builtin_unwind_init ();
6397 return const0_rtx;
6398 case BUILT_IN_DWARF_CFA:
6399 return virtual_cfa_rtx;
6400 #ifdef DWARF2_UNWIND_INFO
6401 case BUILT_IN_DWARF_SP_COLUMN:
6402 return expand_builtin_dwarf_sp_column ();
6403 case BUILT_IN_INIT_DWARF_REG_SIZES:
6404 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6405 return const0_rtx;
6406 #endif
6407 case BUILT_IN_FROB_RETURN_ADDR:
6408 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6409 case BUILT_IN_EXTRACT_RETURN_ADDR:
6410 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6411 case BUILT_IN_EH_RETURN:
6412 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6413 CALL_EXPR_ARG (exp, 1));
6414 return const0_rtx;
6415 case BUILT_IN_EH_RETURN_DATA_REGNO:
6416 return expand_builtin_eh_return_data_regno (exp);
6417 case BUILT_IN_EXTEND_POINTER:
6418 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6419 case BUILT_IN_EH_POINTER:
6420 return expand_builtin_eh_pointer (exp);
6421 case BUILT_IN_EH_FILTER:
6422 return expand_builtin_eh_filter (exp);
6423 case BUILT_IN_EH_COPY_VALUES:
6424 return expand_builtin_eh_copy_values (exp);
6425
6426 case BUILT_IN_VA_START:
6427 return expand_builtin_va_start (exp);
6428 case BUILT_IN_VA_END:
6429 return expand_builtin_va_end (exp);
6430 case BUILT_IN_VA_COPY:
6431 return expand_builtin_va_copy (exp);
6432 case BUILT_IN_EXPECT:
6433 return expand_builtin_expect (exp, target);
6434 case BUILT_IN_ASSUME_ALIGNED:
6435 return expand_builtin_assume_aligned (exp, target);
6436 case BUILT_IN_PREFETCH:
6437 expand_builtin_prefetch (exp);
6438 return const0_rtx;
6439
6440 case BUILT_IN_INIT_TRAMPOLINE:
6441 return expand_builtin_init_trampoline (exp, true);
6442 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6443 return expand_builtin_init_trampoline (exp, false);
6444 case BUILT_IN_ADJUST_TRAMPOLINE:
6445 return expand_builtin_adjust_trampoline (exp);
6446
6447 case BUILT_IN_FORK:
6448 case BUILT_IN_EXECL:
6449 case BUILT_IN_EXECV:
6450 case BUILT_IN_EXECLP:
6451 case BUILT_IN_EXECLE:
6452 case BUILT_IN_EXECVP:
6453 case BUILT_IN_EXECVE:
6454 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6455 if (target)
6456 return target;
6457 break;
6458
6459 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6460 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6461 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6462 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6463 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6465 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6466 if (target)
6467 return target;
6468 break;
6469
6470 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6471 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6472 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6473 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6474 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6476 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6477 if (target)
6478 return target;
6479 break;
6480
6481 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6482 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6483 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6484 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6485 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6487 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6488 if (target)
6489 return target;
6490 break;
6491
6492 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6493 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6494 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6495 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6496 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6497 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6498 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6499 if (target)
6500 return target;
6501 break;
6502
6503 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6504 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6505 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6506 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6507 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6508 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6509 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6510 if (target)
6511 return target;
6512 break;
6513
6514 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6515 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6516 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6517 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6518 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6520 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6521 if (target)
6522 return target;
6523 break;
6524
6525 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6526 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6527 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6528 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6529 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6530 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6531 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6532 if (target)
6533 return target;
6534 break;
6535
6536 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6537 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6538 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6539 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6540 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6541 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6542 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6543 if (target)
6544 return target;
6545 break;
6546
6547 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6548 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6549 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6550 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6551 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6552 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6553 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6554 if (target)
6555 return target;
6556 break;
6557
6558 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6559 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6560 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6561 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6562 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6563 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6564 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6565 if (target)
6566 return target;
6567 break;
6568
6569 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6570 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6571 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6572 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6573 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6574 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6575 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6576 if (target)
6577 return target;
6578 break;
6579
6580 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6581 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6582 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6583 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6584 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6585 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6586 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6587 if (target)
6588 return target;
6589 break;
6590
6591 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6592 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6593 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6594 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6595 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6596 if (mode == VOIDmode)
6597 mode = TYPE_MODE (boolean_type_node);
6598 if (!target || !register_operand (target, mode))
6599 target = gen_reg_rtx (mode);
6600
6601 mode = get_builtin_sync_mode
6602 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6603 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6604 if (target)
6605 return target;
6606 break;
6607
6608 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6609 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6610 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6611 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6612 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6613 mode = get_builtin_sync_mode
6614 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6615 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6621 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6622 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6623 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6624 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6626 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6627 if (target)
6628 return target;
6629 break;
6630
6631 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6632 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6633 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6634 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6635 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6637 expand_builtin_sync_lock_release (mode, exp);
6638 return const0_rtx;
6639
6640 case BUILT_IN_SYNC_SYNCHRONIZE:
6641 expand_builtin_sync_synchronize ();
6642 return const0_rtx;
6643
6644 case BUILT_IN_ATOMIC_EXCHANGE_1:
6645 case BUILT_IN_ATOMIC_EXCHANGE_2:
6646 case BUILT_IN_ATOMIC_EXCHANGE_4:
6647 case BUILT_IN_ATOMIC_EXCHANGE_8:
6648 case BUILT_IN_ATOMIC_EXCHANGE_16:
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6650 target = expand_builtin_atomic_exchange (mode, exp, target);
6651 if (target)
6652 return target;
6653 break;
6654
6655 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6656 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6657 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6658 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6659 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6660 {
6661 unsigned int nargs, z;
6662 vec<tree, va_gc> *vec;
6663
6664 mode =
6665 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6666 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6667 if (target)
6668 return target;
6669
6670 /* If this is turned into an external library call, the weak parameter
6671 must be dropped to match the expected parameter list. */
6672 nargs = call_expr_nargs (exp);
6673 vec_alloc (vec, nargs - 1);
6674 for (z = 0; z < 3; z++)
6675 vec->quick_push (CALL_EXPR_ARG (exp, z));
6676 /* Skip the boolean weak parameter. */
6677 for (z = 4; z < 6; z++)
6678 vec->quick_push (CALL_EXPR_ARG (exp, z));
6679 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6680 break;
6681 }
6682
6683 case BUILT_IN_ATOMIC_LOAD_1:
6684 case BUILT_IN_ATOMIC_LOAD_2:
6685 case BUILT_IN_ATOMIC_LOAD_4:
6686 case BUILT_IN_ATOMIC_LOAD_8:
6687 case BUILT_IN_ATOMIC_LOAD_16:
6688 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6689 target = expand_builtin_atomic_load (mode, exp, target);
6690 if (target)
6691 return target;
6692 break;
6693
6694 case BUILT_IN_ATOMIC_STORE_1:
6695 case BUILT_IN_ATOMIC_STORE_2:
6696 case BUILT_IN_ATOMIC_STORE_4:
6697 case BUILT_IN_ATOMIC_STORE_8:
6698 case BUILT_IN_ATOMIC_STORE_16:
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6700 target = expand_builtin_atomic_store (mode, exp);
6701 if (target)
6702 return const0_rtx;
6703 break;
6704
6705 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6706 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6707 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6708 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6709 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6710 {
6711 enum built_in_function lib;
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6713 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6714 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6716 ignore, lib);
6717 if (target)
6718 return target;
6719 break;
6720 }
6721 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6722 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6723 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6724 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6725 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6726 {
6727 enum built_in_function lib;
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6730 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6732 ignore, lib);
6733 if (target)
6734 return target;
6735 break;
6736 }
6737 case BUILT_IN_ATOMIC_AND_FETCH_1:
6738 case BUILT_IN_ATOMIC_AND_FETCH_2:
6739 case BUILT_IN_ATOMIC_AND_FETCH_4:
6740 case BUILT_IN_ATOMIC_AND_FETCH_8:
6741 case BUILT_IN_ATOMIC_AND_FETCH_16:
6742 {
6743 enum built_in_function lib;
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6746 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6747 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6748 ignore, lib);
6749 if (target)
6750 return target;
6751 break;
6752 }
6753 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6754 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6755 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6756 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6757 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6758 {
6759 enum built_in_function lib;
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6761 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6762 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6764 ignore, lib);
6765 if (target)
6766 return target;
6767 break;
6768 }
6769 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6770 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6771 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6772 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6773 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6774 {
6775 enum built_in_function lib;
6776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6777 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6778 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6780 ignore, lib);
6781 if (target)
6782 return target;
6783 break;
6784 }
6785 case BUILT_IN_ATOMIC_OR_FETCH_1:
6786 case BUILT_IN_ATOMIC_OR_FETCH_2:
6787 case BUILT_IN_ATOMIC_OR_FETCH_4:
6788 case BUILT_IN_ATOMIC_OR_FETCH_8:
6789 case BUILT_IN_ATOMIC_OR_FETCH_16:
6790 {
6791 enum built_in_function lib;
6792 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6793 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6794 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6795 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6796 ignore, lib);
6797 if (target)
6798 return target;
6799 break;
6800 }
6801 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6802 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6803 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6804 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6805 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6806 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6808 ignore, BUILT_IN_NONE);
6809 if (target)
6810 return target;
6811 break;
6812
6813 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6814 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6815 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6816 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6817 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_ATOMIC_FETCH_AND_1:
6826 case BUILT_IN_ATOMIC_FETCH_AND_2:
6827 case BUILT_IN_ATOMIC_FETCH_AND_4:
6828 case BUILT_IN_ATOMIC_FETCH_AND_8:
6829 case BUILT_IN_ATOMIC_FETCH_AND_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6832 ignore, BUILT_IN_NONE);
6833 if (target)
6834 return target;
6835 break;
6836
6837 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6838 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6839 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6840 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6841 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6844 ignore, BUILT_IN_NONE);
6845 if (target)
6846 return target;
6847 break;
6848
6849 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6850 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6851 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6852 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6853 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6856 ignore, BUILT_IN_NONE);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_ATOMIC_FETCH_OR_1:
6862 case BUILT_IN_ATOMIC_FETCH_OR_2:
6863 case BUILT_IN_ATOMIC_FETCH_OR_4:
6864 case BUILT_IN_ATOMIC_FETCH_OR_8:
6865 case BUILT_IN_ATOMIC_FETCH_OR_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6867 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6868 ignore, BUILT_IN_NONE);
6869 if (target)
6870 return target;
6871 break;
6872
6873 case BUILT_IN_ATOMIC_TEST_AND_SET:
6874 return expand_builtin_atomic_test_and_set (exp, target);
6875
6876 case BUILT_IN_ATOMIC_CLEAR:
6877 return expand_builtin_atomic_clear (exp);
6878
6879 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6880 return expand_builtin_atomic_always_lock_free (exp);
6881
6882 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6883 target = expand_builtin_atomic_is_lock_free (exp);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_ATOMIC_THREAD_FENCE:
6889 expand_builtin_atomic_thread_fence (exp);
6890 return const0_rtx;
6891
6892 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6893 expand_builtin_atomic_signal_fence (exp);
6894 return const0_rtx;
6895
6896 case BUILT_IN_OBJECT_SIZE:
6897 return expand_builtin_object_size (exp);
6898
6899 case BUILT_IN_MEMCPY_CHK:
6900 case BUILT_IN_MEMPCPY_CHK:
6901 case BUILT_IN_MEMMOVE_CHK:
6902 case BUILT_IN_MEMSET_CHK:
6903 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6904 if (target)
6905 return target;
6906 break;
6907
6908 case BUILT_IN_STRCPY_CHK:
6909 case BUILT_IN_STPCPY_CHK:
6910 case BUILT_IN_STRNCPY_CHK:
6911 case BUILT_IN_STPNCPY_CHK:
6912 case BUILT_IN_STRCAT_CHK:
6913 case BUILT_IN_STRNCAT_CHK:
6914 case BUILT_IN_SNPRINTF_CHK:
6915 case BUILT_IN_VSNPRINTF_CHK:
6916 maybe_emit_chk_warning (exp, fcode);
6917 break;
6918
6919 case BUILT_IN_SPRINTF_CHK:
6920 case BUILT_IN_VSPRINTF_CHK:
6921 maybe_emit_sprintf_chk_warning (exp, fcode);
6922 break;
6923
6924 case BUILT_IN_FREE:
6925 if (warn_free_nonheap_object)
6926 maybe_emit_free_warning (exp);
6927 break;
6928
6929 case BUILT_IN_THREAD_POINTER:
6930 return expand_builtin_thread_pointer (exp, target);
6931
6932 case BUILT_IN_SET_THREAD_POINTER:
6933 expand_builtin_set_thread_pointer (exp);
6934 return const0_rtx;
6935
6936 case BUILT_IN_CILK_DETACH:
6937 expand_builtin_cilk_detach (exp);
6938 return const0_rtx;
6939
6940 case BUILT_IN_CILK_POP_FRAME:
6941 expand_builtin_cilk_pop_frame (exp);
6942 return const0_rtx;
6943
6944 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6945 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6946 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6947 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6948 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6949 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6950 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6951 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6952 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6953 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6954 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6955 /* We allow user CHKP builtins if Pointer Bounds
6956 Checker is off. */
6957 if (!chkp_function_instrumented_p (current_function_decl))
6958 {
6959 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6960 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6961 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6962 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6963 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6964 return expand_normal (CALL_EXPR_ARG (exp, 0));
6965 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6966 return expand_normal (size_zero_node);
6967 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6968 return expand_normal (size_int (-1));
6969 else
6970 return const0_rtx;
6971 }
6972 /* FALLTHROUGH */
6973
6974 case BUILT_IN_CHKP_BNDMK:
6975 case BUILT_IN_CHKP_BNDSTX:
6976 case BUILT_IN_CHKP_BNDCL:
6977 case BUILT_IN_CHKP_BNDCU:
6978 case BUILT_IN_CHKP_BNDLDX:
6979 case BUILT_IN_CHKP_BNDRET:
6980 case BUILT_IN_CHKP_INTERSECT:
6981 case BUILT_IN_CHKP_NARROW:
6982 case BUILT_IN_CHKP_EXTRACT_LOWER:
6983 case BUILT_IN_CHKP_EXTRACT_UPPER:
6984       /* A software implementation of the Pointer Bounds Checker is not yet
6985 	 available; target support is required.  */
6986 error ("Your target platform does not support -fcheck-pointer-bounds");
6987 break;
6988
6989 case BUILT_IN_ACC_ON_DEVICE:
6990       /* Do a library call if we failed to expand the builtin when
6991 	 folding.  */
6992 break;
6993
6994     default:	/* Just do a library call for an unknown builtin.  */
6995 break;
6996 }
6997
6998 /* The switch statement above can drop through to cause the function
6999 to be called normally. */
7000 return expand_call (exp, target, ignore);
7001 }
7002
7003 /* Similar to expand_builtin but is used for instrumented calls. */
7004
7005 rtx
7006 expand_builtin_with_bounds (tree exp, rtx target,
7007 rtx subtarget ATTRIBUTE_UNUSED,
7008 machine_mode mode, int ignore)
7009 {
7010 tree fndecl = get_callee_fndecl (exp);
7011 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7012
7013 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7014
7015 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7016 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7017
7018 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7019 && fcode < END_CHKP_BUILTINS);
7020
7021 switch (fcode)
7022 {
7023 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7024 target = expand_builtin_memcpy_with_bounds (exp, target);
7025 if (target)
7026 return target;
7027 break;
7028
7029 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7030 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7031 if (target)
7032 return target;
7033 break;
7034
7035 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7036 target = expand_builtin_memset_with_bounds (exp, target, mode);
7037 if (target)
7038 return target;
7039 break;
7040
7041 default:
7042 break;
7043 }
7044
7045 /* The switch statement above can drop through to cause the function
7046 to be called normally. */
7047 return expand_call (exp, target, ignore);
7048 }
7049
7050 /* Determine whether a tree node represents a call to a built-in
7051 function. If the tree T is a call to a built-in function with
7052 the right number of arguments of the appropriate types, return
7053 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7054 Otherwise the return value is END_BUILTINS. */
7055
7056 enum built_in_function
7057 builtin_mathfn_code (const_tree t)
7058 {
7059 const_tree fndecl, arg, parmlist;
7060 const_tree argtype, parmtype;
7061 const_call_expr_arg_iterator iter;
7062
7063 if (TREE_CODE (t) != CALL_EXPR
7064 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7065 return END_BUILTINS;
7066
7067 fndecl = get_callee_fndecl (t);
7068 if (fndecl == NULL_TREE
7069 || TREE_CODE (fndecl) != FUNCTION_DECL
7070 || ! DECL_BUILT_IN (fndecl)
7071 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7072 return END_BUILTINS;
7073
7074 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7075 init_const_call_expr_arg_iterator (t, &iter);
7076 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7077 {
7078 /* If a function doesn't take a variable number of arguments,
7079 the last element in the list will have type `void'. */
7080 parmtype = TREE_VALUE (parmlist);
7081 if (VOID_TYPE_P (parmtype))
7082 {
7083 if (more_const_call_expr_args_p (&iter))
7084 return END_BUILTINS;
7085 return DECL_FUNCTION_CODE (fndecl);
7086 }
7087
7088 if (! more_const_call_expr_args_p (&iter))
7089 return END_BUILTINS;
7090
7091 arg = next_const_call_expr_arg (&iter);
7092 argtype = TREE_TYPE (arg);
7093
7094 if (SCALAR_FLOAT_TYPE_P (parmtype))
7095 {
7096 if (! SCALAR_FLOAT_TYPE_P (argtype))
7097 return END_BUILTINS;
7098 }
7099 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7100 {
7101 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7102 return END_BUILTINS;
7103 }
7104 else if (POINTER_TYPE_P (parmtype))
7105 {
7106 if (! POINTER_TYPE_P (argtype))
7107 return END_BUILTINS;
7108 }
7109 else if (INTEGRAL_TYPE_P (parmtype))
7110 {
7111 if (! INTEGRAL_TYPE_P (argtype))
7112 return END_BUILTINS;
7113 }
7114 else
7115 return END_BUILTINS;
7116 }
7117
7118 /* Variable-length argument list. */
7119 return DECL_FUNCTION_CODE (fndecl);
7120 }
7121
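/* Example use (CALL is a hypothetical tree holding a call expression):

     if (builtin_mathfn_code (call) == BUILT_IN_SQRT)
       ... CALL is sqrt and its arguments matched the prototype ...

   END_BUILTINS means "not a recognized builtin call".  */
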
7122 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7123 evaluate to a constant. */
7124
7125 static tree
7126 fold_builtin_constant_p (tree arg)
7127 {
7128 /* We return 1 for a numeric type that's known to be a constant
7129 value at compile-time or for an aggregate type that's a
7130 literal constant. */
7131 STRIP_NOPS (arg);
7132
7133   /* If we know this is a constant, return the constant one.  */
7134 if (CONSTANT_CLASS_P (arg)
7135 || (TREE_CODE (arg) == CONSTRUCTOR
7136 && TREE_CONSTANT (arg)))
7137 return integer_one_node;
7138 if (TREE_CODE (arg) == ADDR_EXPR)
7139 {
7140 tree op = TREE_OPERAND (arg, 0);
7141 if (TREE_CODE (op) == STRING_CST
7142 || (TREE_CODE (op) == ARRAY_REF
7143 && integer_zerop (TREE_OPERAND (op, 1))
7144 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7145 return integer_one_node;
7146 }
7147
7148 /* If this expression has side effects, show we don't know it to be a
7149 constant. Likewise if it's a pointer or aggregate type since in
7150      those cases we only want literals, since those are only optimized
7151 when generating RTL, not later.
7152 And finally, if we are compiling an initializer, not code, we
7153 need to return a definite result now; there's not going to be any
7154 more optimization done. */
7155 if (TREE_SIDE_EFFECTS (arg)
7156 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7157 || POINTER_TYPE_P (TREE_TYPE (arg))
7158 || cfun == 0
7159 || folding_initializer
7160 || force_folding_builtin_constant_p)
7161 return integer_zero_node;
7162
7163 return NULL_TREE;
7164 }
7165
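/* A few concrete outcomes of the folding above:

     __builtin_constant_p (42)     -> 1          (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1          (address of a STRING_CST)
     __builtin_constant_p (ptr)    -> 0          (pointer type: literals only)
     __builtin_constant_p (x)      -> NULL_TREE  (deferred: X may still
						  become constant later)  */
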
7166 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
7167    non-null, PREDICTOR) as its arguments and return it as a truthvalue.  */
7168
7169 static tree
7170 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7171 tree predictor)
7172 {
7173 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7174
7175 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7176 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7177 ret_type = TREE_TYPE (TREE_TYPE (fn));
7178 pred_type = TREE_VALUE (arg_types);
7179 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7180
7181 pred = fold_convert_loc (loc, pred_type, pred);
7182 expected = fold_convert_loc (loc, expected_type, expected);
7183 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7184 predictor);
7185
7186 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7187 build_int_cst (ret_type, 0));
7188 }
7189
7190 /* Fold a call to builtin_expect with arguments ARG0 and ARG1, plus the
7191    optional predictor ARG2.  Return NULL_TREE if no simplification is possible.  */
7192
7193 tree
7194 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7195 {
7196 tree inner, fndecl, inner_arg0;
7197 enum tree_code code;
7198
7199 /* Distribute the expected value over short-circuiting operators.
7200 See through the cast from truthvalue_type_node to long. */
7201 inner_arg0 = arg0;
7202 while (CONVERT_EXPR_P (inner_arg0)
7203 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7204 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7205 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7206
7207   /* If this is a builtin_expect within a builtin_expect, keep the
7208      inner one.  See through a comparison against a constant.  It
7209      might have been added to create a truthvalue.  */
7210 inner = inner_arg0;
7211
7212 if (COMPARISON_CLASS_P (inner)
7213 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7214 inner = TREE_OPERAND (inner, 0);
7215
7216 if (TREE_CODE (inner) == CALL_EXPR
7217 && (fndecl = get_callee_fndecl (inner))
7218 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7219 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7220 return arg0;
7221
7222 inner = inner_arg0;
7223 code = TREE_CODE (inner);
7224 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7225 {
7226 tree op0 = TREE_OPERAND (inner, 0);
7227 tree op1 = TREE_OPERAND (inner, 1);
7228
7229 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7230 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7231 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7232
7233 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7234 }
7235
7236 /* If the argument isn't invariant then there's nothing else we can do. */
7237 if (!TREE_CONSTANT (inner_arg0))
7238 return NULL_TREE;
7239
7240 /* If we expect that a comparison against the argument will fold to
7241      a constant, return the constant.  In practice, this means a true
7242 constant or the address of a non-weak symbol. */
7243 inner = inner_arg0;
7244 STRIP_NOPS (inner);
7245 if (TREE_CODE (inner) == ADDR_EXPR)
7246 {
7247 do
7248 {
7249 inner = TREE_OPERAND (inner, 0);
7250 }
7251 while (TREE_CODE (inner) == COMPONENT_REF
7252 || TREE_CODE (inner) == ARRAY_REF);
7253 if ((TREE_CODE (inner) == VAR_DECL
7254 || TREE_CODE (inner) == FUNCTION_DECL)
7255 && DECL_WEAK (inner))
7256 return NULL_TREE;
7257 }
7258
7259 /* Otherwise, ARG0 already has the proper type for the return value. */
7260 return arg0;
7261 }
7262
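/* The distribution over short-circuit operators amounts to, roughly:

     __builtin_expect (a && b, 1)
       -> __builtin_expect (a, 1) && __builtin_expect (b, 1)

   with each operand wrapped by build_builtin_expect_predicate, i.e.
   compared against zero to recreate a truthvalue.  */
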
7263 /* Fold a call to __builtin_classify_type with argument ARG. */
7264
7265 static tree
7266 fold_builtin_classify_type (tree arg)
7267 {
7268 if (arg == 0)
7269 return build_int_cst (integer_type_node, no_type_class);
7270
7271 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7272 }
7273
7274 /* Fold a call to __builtin_strlen with argument ARG. */
7275
7276 static tree
7277 fold_builtin_strlen (location_t loc, tree type, tree arg)
7278 {
7279 if (!validate_arg (arg, POINTER_TYPE))
7280 return NULL_TREE;
7281 else
7282 {
7283 tree len = c_strlen (arg, 0);
7284
7285 if (len)
7286 return fold_convert_loc (loc, type, len);
7287
7288 return NULL_TREE;
7289 }
7290 }
7291
7292 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7293
7294 static tree
7295 fold_builtin_inf (location_t loc, tree type, int warn)
7296 {
7297 REAL_VALUE_TYPE real;
7298
7299 /* __builtin_inff is intended to be usable to define INFINITY on all
7300 targets. If an infinity is not available, INFINITY expands "to a
7301 positive constant of type float that overflows at translation
7302 time", footnote "In this case, using INFINITY will violate the
7303 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7304 Thus we pedwarn to ensure this constraint violation is
7305 diagnosed. */
7306 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7307 pedwarn (loc, 0, "target format does not support infinity");
7308
7309 real_inf (&real);
7310 return build_real (type, real);
7311 }
7312
7313 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7314
7315 static tree
7316 fold_builtin_nan (tree arg, tree type, int quiet)
7317 {
7318 REAL_VALUE_TYPE real;
7319 const char *str;
7320
7321 if (!validate_arg (arg, POINTER_TYPE))
7322 return NULL_TREE;
7323 str = c_getstr (arg);
7324 if (!str)
7325 return NULL_TREE;
7326
7327 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7328 return NULL_TREE;
7329
7330 return build_real (type, real);
7331 }
7332
7333 /* Return true if the floating point expression T has an integer value.
7334 We also allow +Inf, -Inf and NaN to be considered integer values. */
7335
7336 static bool
7337 integer_valued_real_p (tree t)
7338 {
7339 switch (TREE_CODE (t))
7340 {
7341 case FLOAT_EXPR:
7342 return true;
7343
7344 case ABS_EXPR:
7345 case SAVE_EXPR:
7346 return integer_valued_real_p (TREE_OPERAND (t, 0));
7347
7348 case COMPOUND_EXPR:
7349 case MODIFY_EXPR:
7350 case BIND_EXPR:
7351 return integer_valued_real_p (TREE_OPERAND (t, 1));
7352
7353 case PLUS_EXPR:
7354 case MINUS_EXPR:
7355 case MULT_EXPR:
7356 case MIN_EXPR:
7357 case MAX_EXPR:
7358 return integer_valued_real_p (TREE_OPERAND (t, 0))
7359 && integer_valued_real_p (TREE_OPERAND (t, 1));
7360
7361 case COND_EXPR:
7362 return integer_valued_real_p (TREE_OPERAND (t, 1))
7363 && integer_valued_real_p (TREE_OPERAND (t, 2));
7364
7365 case REAL_CST:
7366 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7367
7368 CASE_CONVERT:
7369 {
7370 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7371 if (TREE_CODE (type) == INTEGER_TYPE)
7372 return true;
7373 if (TREE_CODE (type) == REAL_TYPE)
7374 return integer_valued_real_p (TREE_OPERAND (t, 0));
7375 break;
7376 }
7377
7378 case CALL_EXPR:
7379 switch (builtin_mathfn_code (t))
7380 {
7381 CASE_FLT_FN (BUILT_IN_CEIL):
7382 CASE_FLT_FN (BUILT_IN_FLOOR):
7383 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7384 CASE_FLT_FN (BUILT_IN_RINT):
7385 CASE_FLT_FN (BUILT_IN_ROUND):
7386 CASE_FLT_FN (BUILT_IN_TRUNC):
7387 return true;
7388
7389 CASE_FLT_FN (BUILT_IN_FMIN):
7390 CASE_FLT_FN (BUILT_IN_FMAX):
7391 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7392 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7393
7394 default:
7395 break;
7396 }
7397 break;
7398
7399 default:
7400 break;
7401 }
7402 return false;
7403 }
7404
7405 /* FNDECL is assumed to be a builtin where truncation can be propagated
7406    across (for instance floor((double)f) == (double)floorf (f)).
7407 Do the transformation for a call with argument ARG. */
7408
7409 static tree
7410 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7411 {
7412 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7413
7414 if (!validate_arg (arg, REAL_TYPE))
7415 return NULL_TREE;
7416
7417 /* Integer rounding functions are idempotent. */
7418 if (fcode == builtin_mathfn_code (arg))
7419 return arg;
7420
7421   /* If the argument is already integer valued, and we don't need to worry
7422 about setting errno, there's no need to perform rounding. */
7423 if (! flag_errno_math && integer_valued_real_p (arg))
7424 return arg;
7425
7426 if (optimize)
7427 {
7428 tree arg0 = strip_float_extensions (arg);
7429 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7430 tree newtype = TREE_TYPE (arg0);
7431 tree decl;
7432
7433 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7434 && (decl = mathfn_built_in (newtype, fcode)))
7435 return fold_convert_loc (loc, ftype,
7436 build_call_expr_loc (loc, decl, 1,
7437 fold_convert_loc (loc,
7438 newtype,
7439 arg0)));
7440 }
7441 return NULL_TREE;
7442 }
7443
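/* Sketch of the narrowing this performs, for a float F:

     floor ((double) f)  ->  (double) floorf (f)

   The truncating rounder yields the same value when computed in the
   narrower type, so the call is retargeted to the float variant.  */
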
7444 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7445 the argument, for instance lround((double)f) -> lroundf (f).
7446 Do the transformation for a call with argument ARG. */
7447
7448 static tree
7449 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7450 {
7451 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7452
7453 if (!validate_arg (arg, REAL_TYPE))
7454 return NULL_TREE;
7455
7456   /* If the argument is already integer valued, and we don't need to worry
7457 about setting errno, there's no need to perform rounding. */
7458 if (! flag_errno_math && integer_valued_real_p (arg))
7459 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7460 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7461
7462 if (optimize)
7463 {
7464 tree ftype = TREE_TYPE (arg);
7465 tree arg0 = strip_float_extensions (arg);
7466 tree newtype = TREE_TYPE (arg0);
7467 tree decl;
7468
7469 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7470 && (decl = mathfn_built_in (newtype, fcode)))
7471 return build_call_expr_loc (loc, decl, 1,
7472 fold_convert_loc (loc, newtype, arg0));
7473 }
7474
7475 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7476 sizeof (int) == sizeof (long). */
7477 if (TYPE_PRECISION (integer_type_node)
7478 == TYPE_PRECISION (long_integer_type_node))
7479 {
7480 tree newfn = NULL_TREE;
7481 switch (fcode)
7482 {
7483 CASE_FLT_FN (BUILT_IN_ICEIL):
7484 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7485 break;
7486
7487 CASE_FLT_FN (BUILT_IN_IFLOOR):
7488 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7489 break;
7490
7491 CASE_FLT_FN (BUILT_IN_IROUND):
7492 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7493 break;
7494
7495 CASE_FLT_FN (BUILT_IN_IRINT):
7496 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7497 break;
7498
7499 default:
7500 break;
7501 }
7502
7503 if (newfn)
7504 {
7505 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7506 return fold_convert_loc (loc,
7507 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7508 }
7509 }
7510
7511 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7512 sizeof (long long) == sizeof (long). */
7513 if (TYPE_PRECISION (long_long_integer_type_node)
7514 == TYPE_PRECISION (long_integer_type_node))
7515 {
7516 tree newfn = NULL_TREE;
7517 switch (fcode)
7518 {
7519 CASE_FLT_FN (BUILT_IN_LLCEIL):
7520 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7521 break;
7522
7523 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7524 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7525 break;
7526
7527 CASE_FLT_FN (BUILT_IN_LLROUND):
7528 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7529 break;
7530
7531 CASE_FLT_FN (BUILT_IN_LLRINT):
7532 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7533 break;
7534
7535 default:
7536 break;
7537 }
7538
7539 if (newfn)
7540 {
7541 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7542 return fold_convert_loc (loc,
7543 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7544 }
7545 }
7546
7547 return NULL_TREE;
7548 }
7549
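/* The canonicalizations above, spelled out:

     lround ((double) f)  ->  lroundf (f)
     iround (x)   ->  lround (x), converted back, when int == long (ILP32)
     llround (x)  ->  lround (x), converted back, when long long == long
		      (LP64)

   so later folders need only recognize the 'l' forms.  */
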
7550 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7551 return type. Return NULL_TREE if no simplification can be made. */
7552
7553 static tree
7554 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7555 {
7556 tree res;
7557
7558 if (!validate_arg (arg, COMPLEX_TYPE)
7559 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7560 return NULL_TREE;
7561
7562 /* Calculate the result when the argument is a constant. */
7563 if (TREE_CODE (arg) == COMPLEX_CST
7564 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7565 type, mpfr_hypot)))
7566 return res;
7567
7568 if (TREE_CODE (arg) == COMPLEX_EXPR)
7569 {
7570 tree real = TREE_OPERAND (arg, 0);
7571 tree imag = TREE_OPERAND (arg, 1);
7572
7573 /* If either part is zero, cabs is fabs of the other. */
7574 if (real_zerop (real))
7575 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7576 if (real_zerop (imag))
7577 return fold_build1_loc (loc, ABS_EXPR, type, real);
7578
7579 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7580 if (flag_unsafe_math_optimizations
7581 && operand_equal_p (real, imag, OEP_PURE_SAME))
7582 {
7583 STRIP_NOPS (real);
7584 return fold_build2_loc (loc, MULT_EXPR, type,
7585 fold_build1_loc (loc, ABS_EXPR, type, real),
7586 build_real_truncate (type, dconst_sqrt2 ()));
7587 }
7588 }
7589
7590 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7591 if (TREE_CODE (arg) == NEGATE_EXPR
7592 || TREE_CODE (arg) == CONJ_EXPR)
7593 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7594
7595 /* Don't do this when optimizing for size. */
7596 if (flag_unsafe_math_optimizations
7597 && optimize && optimize_function_for_speed_p (cfun))
7598 {
7599 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7600
7601 if (sqrtfn != NULL_TREE)
7602 {
7603 tree rpart, ipart, result;
7604
7605 arg = builtin_save_expr (arg);
7606
7607 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7608 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7609
7610 rpart = builtin_save_expr (rpart);
7611 ipart = builtin_save_expr (ipart);
7612
7613 result = fold_build2_loc (loc, PLUS_EXPR, type,
7614 fold_build2_loc (loc, MULT_EXPR, type,
7615 rpart, rpart),
7616 fold_build2_loc (loc, MULT_EXPR, type,
7617 ipart, ipart));
7618
7619 return build_call_expr_loc (loc, sqrtfn, 1, result);
7620 }
7621 }
7622
7623 return NULL_TREE;
7624 }
7625
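/* The cabs folds above, collected:

     cabs (x + 0i)  -> fabs (x)         cabs (0 + yi)   -> fabs (y)
     cabs (x + xi)  -> fabs (x) * sqrt (2)   (unsafe math only)
     cabs (-z)      -> cabs (z)         cabs (conj (z)) -> cabs (z)
     cabs (z)       -> sqrt (r*r + i*i)      (unsafe math, optimizing
					      for speed)  */
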
7626 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7627 complex tree type of the result. If NEG is true, the imaginary
7628 zero is negative. */
7629
7630 static tree
7631 build_complex_cproj (tree type, bool neg)
7632 {
7633 REAL_VALUE_TYPE rinf, rzero = dconst0;
7634
7635 real_inf (&rinf);
7636 rzero.sign = neg;
7637 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7638 build_real (TREE_TYPE (type), rzero));
7639 }
7640
7641 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7642 return type. Return NULL_TREE if no simplification can be made. */
7643
7644 static tree
7645 fold_builtin_cproj (location_t loc, tree arg, tree type)
7646 {
7647 if (!validate_arg (arg, COMPLEX_TYPE)
7648 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7649 return NULL_TREE;
7650
7651 /* If there are no infinities, return arg. */
7652 if (! HONOR_INFINITIES (type))
7653 return non_lvalue_loc (loc, arg);
7654
7655 /* Calculate the result when the argument is a constant. */
7656 if (TREE_CODE (arg) == COMPLEX_CST)
7657 {
7658 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7659 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7660
7661 if (real_isinf (real) || real_isinf (imag))
7662 return build_complex_cproj (type, imag->sign);
7663 else
7664 return arg;
7665 }
7666 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7667 {
7668 tree real = TREE_OPERAND (arg, 0);
7669 tree imag = TREE_OPERAND (arg, 1);
7670
7671 STRIP_NOPS (real);
7672 STRIP_NOPS (imag);
7673
7674 /* If the real part is inf and the imag part is known to be
7675 nonnegative, return (inf + 0i). Remember side-effects are
7676 possible in the imag part. */
7677 if (TREE_CODE (real) == REAL_CST
7678 && real_isinf (TREE_REAL_CST_PTR (real))
7679 && tree_expr_nonnegative_p (imag))
7680 return omit_one_operand_loc (loc, type,
7681 build_complex_cproj (type, false),
7682 arg);
7683
7684 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7685 Remember side-effects are possible in the real part. */
7686 if (TREE_CODE (imag) == REAL_CST
7687 && real_isinf (TREE_REAL_CST_PTR (imag)))
7688 return
7689 omit_one_operand_loc (loc, type,
7690 build_complex_cproj (type, TREE_REAL_CST_PTR
7691 (imag)->sign), arg);
7692 }
7693
7694 return NULL_TREE;
7695 }
7696
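/* cproj maps any value with an infinite component to the point at
   infinity, preserving the sign of the imaginary zero:

     cproj (inf + 2i)   -> inf + 0i
     cproj (1 - inf*I)  -> inf - 0i
     cproj (1 + 2i)     -> 1 + 2i     (finite values pass through)  */
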
7697 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7698 TYPE is the type of the return value. Return NULL_TREE if no
7699 simplification can be made. */
7700
7701 static tree
7702 fold_builtin_cos (location_t loc,
7703 tree arg, tree type, tree fndecl)
7704 {
7705 tree res, narg;
7706
7707 if (!validate_arg (arg, REAL_TYPE))
7708 return NULL_TREE;
7709
7710 /* Calculate the result when the argument is a constant. */
7711 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7712 return res;
7713
7714 /* Optimize cos(-x) into cos (x). */
7715 if ((narg = fold_strip_sign_ops (arg)))
7716 return build_call_expr_loc (loc, fndecl, 1, narg);
7717
7718 return NULL_TREE;
7719 }
7720
7721 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7722 Return NULL_TREE if no simplification can be made. */
7723
7724 static tree
7725 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7726 {
7727 if (validate_arg (arg, REAL_TYPE))
7728 {
7729 tree res, narg;
7730
7731 /* Calculate the result when the argument is a constant. */
7732 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7733 return res;
7734
7735 /* Optimize cosh(-x) into cosh (x). */
7736 if ((narg = fold_strip_sign_ops (arg)))
7737 return build_call_expr_loc (loc, fndecl, 1, narg);
7738 }
7739
7740 return NULL_TREE;
7741 }
7742
7743 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7744 argument ARG. TYPE is the type of the return value. Return
7745 NULL_TREE if no simplification can be made. */
7746
7747 static tree
7748 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7749 bool hyper)
7750 {
7751 if (validate_arg (arg, COMPLEX_TYPE)
7752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7753 {
7754 tree tmp;
7755
7756 /* Calculate the result when the argument is a constant. */
7757 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7758 return tmp;
7759
7760 /* Optimize fn(-x) into fn(x). */
7761 if ((tmp = fold_strip_sign_ops (arg)))
7762 return build_call_expr_loc (loc, fndecl, 1, tmp);
7763 }
7764
7765 return NULL_TREE;
7766 }
7767
7768 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7769 Return NULL_TREE if no simplification can be made. */
7770
7771 static tree
7772 fold_builtin_tan (tree arg, tree type)
7773 {
7774 enum built_in_function fcode;
7775 tree res;
7776
7777 if (!validate_arg (arg, REAL_TYPE))
7778 return NULL_TREE;
7779
7780 /* Calculate the result when the argument is a constant. */
7781 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7782 return res;
7783
7784 /* Optimize tan(atan(x)) = x. */
7785 fcode = builtin_mathfn_code (arg);
7786 if (flag_unsafe_math_optimizations
7787 && (fcode == BUILT_IN_ATAN
7788 || fcode == BUILT_IN_ATANF
7789 || fcode == BUILT_IN_ATANL))
7790 return CALL_EXPR_ARG (arg, 0);
7791
7792 return NULL_TREE;
7793 }
7794
7795 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7796 NULL_TREE if no simplification can be made. */
7797
7798 static tree
7799 fold_builtin_sincos (location_t loc,
7800 tree arg0, tree arg1, tree arg2)
7801 {
7802 tree type;
7803 tree res, fn, call;
7804
7805 if (!validate_arg (arg0, REAL_TYPE)
7806 || !validate_arg (arg1, POINTER_TYPE)
7807 || !validate_arg (arg2, POINTER_TYPE))
7808 return NULL_TREE;
7809
7810 type = TREE_TYPE (arg0);
7811
7812 /* Calculate the result when the argument is a constant. */
7813 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7814 return res;
7815
7816 /* Canonicalize sincos to cexpi. */
7817 if (!targetm.libc_has_function (function_c99_math_complex))
7818 return NULL_TREE;
7819 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7820 if (!fn)
7821 return NULL_TREE;
7822
7823 call = build_call_expr_loc (loc, fn, 1, arg0);
7824 call = builtin_save_expr (call);
7825
7826 return build2 (COMPOUND_EXPR, void_type_node,
7827 build2 (MODIFY_EXPR, void_type_node,
7828 build_fold_indirect_ref_loc (loc, arg1),
7829 build1 (IMAGPART_EXPR, type, call)),
7830 build2 (MODIFY_EXPR, void_type_node,
7831 build_fold_indirect_ref_loc (loc, arg2),
7832 build1 (REALPART_EXPR, type, call)));
7833 }
7834
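/* The cexpi canonicalization corresponds to rewriting

     sincos (x, &s, &c);

   as, with T a saved temporary,

     T = cexpi (x);  s = __imag__ T;  c = __real__ T;

   since cexpi (x) computes cos (x) + i*sin (x).  */
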
7835 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7836 NULL_TREE if no simplification can be made. */
7837
7838 static tree
7839 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7840 {
7841 tree rtype;
7842 tree realp, imagp, ifn;
7843 tree res;
7844
7845 if (!validate_arg (arg0, COMPLEX_TYPE)
7846 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7847 return NULL_TREE;
7848
7849 /* Calculate the result when the argument is a constant. */
7850 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7851 return res;
7852
7853 rtype = TREE_TYPE (TREE_TYPE (arg0));
7854
7855   /* In case we can figure out the real part of arg0 and it is constant
7856      zero, fold to cexpi.  */
7857 if (!targetm.libc_has_function (function_c99_math_complex))
7858 return NULL_TREE;
7859 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7860 if (!ifn)
7861 return NULL_TREE;
7862
7863 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7864 && real_zerop (realp))
7865 {
7866 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7867 return build_call_expr_loc (loc, ifn, 1, narg);
7868 }
7869
7870   /* In case we can easily decompose the real and imaginary parts, split
7871      cexp into exp (r) * cexpi (i).  */
7872 if (flag_unsafe_math_optimizations
7873 && realp)
7874 {
7875 tree rfn, rcall, icall;
7876
7877 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7878 if (!rfn)
7879 return NULL_TREE;
7880
7881 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7882 if (!imagp)
7883 return NULL_TREE;
7884
7885 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7886 icall = builtin_save_expr (icall);
7887 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7888 rcall = builtin_save_expr (rcall);
7889 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7890 fold_build2_loc (loc, MULT_EXPR, rtype,
7891 rcall,
7892 fold_build1_loc (loc, REALPART_EXPR,
7893 rtype, icall)),
7894 fold_build2_loc (loc, MULT_EXPR, rtype,
7895 rcall,
7896 fold_build1_loc (loc, IMAGPART_EXPR,
7897 rtype, icall)));
7898 }
7899
7900 return NULL_TREE;
7901 }
7902
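/* The cexp splits above, in source terms:

     cexp (0 + yi)  -> cexpi (y)
     cexp (r + yi)  -> exp (r) * cexpi (y)    (unsafe math only)

   with the multiply applied separately to the real and imaginary
   parts of the saved cexpi result.  */
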
7903 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7904 Return NULL_TREE if no simplification can be made. */
7905
7906 static tree
7907 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7908 {
7909 if (!validate_arg (arg, REAL_TYPE))
7910 return NULL_TREE;
7911
7912 /* Optimize trunc of constant value. */
7913 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7914 {
7915 REAL_VALUE_TYPE r, x;
7916 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7917
7918 x = TREE_REAL_CST (arg);
7919 real_trunc (&r, TYPE_MODE (type), &x);
7920 return build_real (type, r);
7921 }
7922
7923 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7924 }
7925
7926 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7927 Return NULL_TREE if no simplification can be made. */
7928
7929 static tree
7930 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7931 {
7932 if (!validate_arg (arg, REAL_TYPE))
7933 return NULL_TREE;
7934
7935 /* Optimize floor of constant value. */
7936 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7937 {
7938 REAL_VALUE_TYPE x;
7939
7940 x = TREE_REAL_CST (arg);
7941 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7942 {
7943 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7944 REAL_VALUE_TYPE r;
7945
7946 real_floor (&r, TYPE_MODE (type), &x);
7947 return build_real (type, r);
7948 }
7949 }
7950
7951 /* Fold floor (x) where x is nonnegative to trunc (x). */
7952 if (tree_expr_nonnegative_p (arg))
7953 {
7954 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7955 if (truncfn)
7956 return build_call_expr_loc (loc, truncfn, 1, arg);
7957 }
7958
7959 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7960 }
7961
7962 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7963 Return NULL_TREE if no simplification can be made. */
7964
7965 static tree
7966 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7967 {
7968 if (!validate_arg (arg, REAL_TYPE))
7969 return NULL_TREE;
7970
7971 /* Optimize ceil of constant value. */
7972 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7973 {
7974 REAL_VALUE_TYPE x;
7975
7976 x = TREE_REAL_CST (arg);
7977 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7978 {
7979 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7980 REAL_VALUE_TYPE r;
7981
7982 real_ceil (&r, TYPE_MODE (type), &x);
7983 return build_real (type, r);
7984 }
7985 }
7986
7987 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7988 }
7989
7990 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7991 Return NULL_TREE if no simplification can be made. */
7992
7993 static tree
7994 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7995 {
7996 if (!validate_arg (arg, REAL_TYPE))
7997 return NULL_TREE;
7998
7999 /* Optimize round of constant value. */
8000 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8001 {
8002 REAL_VALUE_TYPE x;
8003
8004 x = TREE_REAL_CST (arg);
8005 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8006 {
8007 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8008 REAL_VALUE_TYPE r;
8009
8010 real_round (&r, TYPE_MODE (type), &x);
8011 return build_real (type, r);
8012 }
8013 }
8014
8015 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8016 }
8017
8018 /* Fold function call to builtin lround, lroundf or lroundl (or the
8019 corresponding long long versions) and other rounding functions. ARG
8020 is the argument to the call. Return NULL_TREE if no simplification
8021 can be made. */
8022
8023 static tree
8024 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8025 {
8026 if (!validate_arg (arg, REAL_TYPE))
8027 return NULL_TREE;
8028
8029 /* Optimize lround of constant value. */
8030 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8031 {
8032 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8033
8034 if (real_isfinite (&x))
8035 {
8036 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8037 tree ftype = TREE_TYPE (arg);
8038 REAL_VALUE_TYPE r;
8039 bool fail = false;
8040
8041 switch (DECL_FUNCTION_CODE (fndecl))
8042 {
8043 CASE_FLT_FN (BUILT_IN_IFLOOR):
8044 CASE_FLT_FN (BUILT_IN_LFLOOR):
8045 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8046 real_floor (&r, TYPE_MODE (ftype), &x);
8047 break;
8048
8049 CASE_FLT_FN (BUILT_IN_ICEIL):
8050 CASE_FLT_FN (BUILT_IN_LCEIL):
8051 CASE_FLT_FN (BUILT_IN_LLCEIL):
8052 real_ceil (&r, TYPE_MODE (ftype), &x);
8053 break;
8054
8055 CASE_FLT_FN (BUILT_IN_IROUND):
8056 CASE_FLT_FN (BUILT_IN_LROUND):
8057 CASE_FLT_FN (BUILT_IN_LLROUND):
8058 real_round (&r, TYPE_MODE (ftype), &x);
8059 break;
8060
8061 default:
8062 gcc_unreachable ();
8063 }
8064
8065 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8066 if (!fail)
8067 return wide_int_to_tree (itype, val);
8068 }
8069 }
8070
8071 switch (DECL_FUNCTION_CODE (fndecl))
8072 {
8073 CASE_FLT_FN (BUILT_IN_LFLOOR):
8074 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8075 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8076 if (tree_expr_nonnegative_p (arg))
8077 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8078 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8079 break;
8080 default:;
8081 }
8082
8083 return fold_fixed_mathfn (loc, fndecl, arg);
8084 }
8085
8086 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8087    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8088 the argument to the call. Return NULL_TREE if no simplification can
8089 be made. */
8090
8091 static tree
8092 fold_builtin_bitop (tree fndecl, tree arg)
8093 {
8094 if (!validate_arg (arg, INTEGER_TYPE))
8095 return NULL_TREE;
8096
8097 /* Optimize for constant argument. */
8098 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8099 {
8100 tree type = TREE_TYPE (arg);
8101 int result;
8102
8103 switch (DECL_FUNCTION_CODE (fndecl))
8104 {
8105 CASE_INT_FN (BUILT_IN_FFS):
8106 result = wi::ffs (arg);
8107 break;
8108
8109 CASE_INT_FN (BUILT_IN_CLZ):
8110 if (wi::ne_p (arg, 0))
8111 result = wi::clz (arg);
8112 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8113 result = TYPE_PRECISION (type);
8114 break;
8115
8116 CASE_INT_FN (BUILT_IN_CTZ):
8117 if (wi::ne_p (arg, 0))
8118 result = wi::ctz (arg);
8119 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8120 result = TYPE_PRECISION (type);
8121 break;
8122
8123 CASE_INT_FN (BUILT_IN_CLRSB):
8124 result = wi::clrsb (arg);
8125 break;
8126
8127 CASE_INT_FN (BUILT_IN_POPCOUNT):
8128 result = wi::popcount (arg);
8129 break;
8130
8131 CASE_INT_FN (BUILT_IN_PARITY):
8132 result = wi::parity (arg);
8133 break;
8134
8135 default:
8136 gcc_unreachable ();
8137 }
8138
8139 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8140 }
8141
8142 return NULL_TREE;
8143 }
8144
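/* Constant-folding examples for a 32-bit int argument:

     __builtin_ffs (8)          -> 4   (lowest set bit, 1-based)
     __builtin_clz (1)          -> 31
     __builtin_ctz (8)          -> 3
     __builtin_popcount (0xF0)  -> 4
     __builtin_parity (7)       -> 1

   clz/ctz of zero fold to the target's defined value when there is
   one, otherwise to the type precision, as coded above.  */
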
8145 /* Fold function call to builtin_bswap and the short, long and long long
8146 variants. Return NULL_TREE if no simplification can be made. */
8147 static tree
8148 fold_builtin_bswap (tree fndecl, tree arg)
8149 {
8150 if (! validate_arg (arg, INTEGER_TYPE))
8151 return NULL_TREE;
8152
8153 /* Optimize constant value. */
8154 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8155 {
8156 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8157
8158 switch (DECL_FUNCTION_CODE (fndecl))
8159 {
8160 case BUILT_IN_BSWAP16:
8161 case BUILT_IN_BSWAP32:
8162 case BUILT_IN_BSWAP64:
8163 {
8164 signop sgn = TYPE_SIGN (type);
8165 tree result =
8166 wide_int_to_tree (type,
8167 wide_int::from (arg, TYPE_PRECISION (type),
8168 sgn).bswap ());
8169 return result;
8170 }
8171 default:
8172 gcc_unreachable ();
8173 }
8174 }
8175
8176 return NULL_TREE;
8177 }
8178
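/* Byte-swap constant folding, e.g.:

     __builtin_bswap16 (0x1234)      -> 0x3412
     __builtin_bswap32 (0x12345678)  -> 0x78563412  */
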
8179 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8180 NULL_TREE if no simplification can be made. */
8181
8182 static tree
8183 fold_builtin_hypot (location_t loc, tree fndecl,
8184 tree arg0, tree arg1, tree type)
8185 {
8186 tree res, narg0, narg1;
8187
8188 if (!validate_arg (arg0, REAL_TYPE)
8189 || !validate_arg (arg1, REAL_TYPE))
8190 return NULL_TREE;
8191
8192 /* Calculate the result when the argument is a constant. */
8193 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8194 return res;
8195
8196 /* If either argument to hypot has a negate or abs, strip that off.
8197 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8198 narg0 = fold_strip_sign_ops (arg0);
8199 narg1 = fold_strip_sign_ops (arg1);
8200 if (narg0 || narg1)
8201 {
8202 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8203 narg1 ? narg1 : arg1);
8204 }
8205
8206 /* If either argument is zero, hypot is fabs of the other. */
8207 if (real_zerop (arg0))
8208 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8209 else if (real_zerop (arg1))
8210 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8211
8212 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8213 if (flag_unsafe_math_optimizations
8214 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8215 return fold_build2_loc (loc, MULT_EXPR, type,
8216 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8217 build_real_truncate (type, dconst_sqrt2 ()));
8218
8219 return NULL_TREE;
8220 }
8221
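/* The hypot simplifications, collected:

     hypot (-x, fabs (y))  -> hypot (x, y)
     hypot (x, 0)          -> fabs (x)       hypot (0, y) -> fabs (y)
     hypot (x, x)          -> fabs (x) * sqrt (2)   (unsafe math only)  */
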
8222
8223 /* Fold a builtin function call to pow, powf, or powl. Return
8224 NULL_TREE if no simplification can be made. */
8225 static tree
8226 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8227 {
8228 tree res;
8229
8230 if (!validate_arg (arg0, REAL_TYPE)
8231 || !validate_arg (arg1, REAL_TYPE))
8232 return NULL_TREE;
8233
8234 /* Calculate the result when the argument is a constant. */
8235 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8236 return res;
8237
8238 /* Optimize pow(1.0,y) = 1.0. */
8239 if (real_onep (arg0))
8240 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8241
8242 if (TREE_CODE (arg1) == REAL_CST
8243 && !TREE_OVERFLOW (arg1))
8244 {
8245 REAL_VALUE_TYPE cint;
8246 REAL_VALUE_TYPE c;
8247 HOST_WIDE_INT n;
8248
8249 c = TREE_REAL_CST (arg1);
8250
8251 /* Optimize pow(x,0.0) = 1.0. */
8252 if (real_equal (&c, &dconst0))
8253 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8254 arg0);
8255
8256 /* Optimize pow(x,1.0) = x. */
8257 if (real_equal (&c, &dconst1))
8258 return arg0;
8259
8260 /* Optimize pow(x,-1.0) = 1.0/x. */
8261 if (real_equal (&c, &dconstm1))
8262 return fold_build2_loc (loc, RDIV_EXPR, type,
8263 build_real (type, dconst1), arg0);
8264
8265 /* Optimize pow(x,0.5) = sqrt(x). */
8266 if (flag_unsafe_math_optimizations
8267 && real_equal (&c, &dconsthalf))
8268 {
8269 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8270
8271 if (sqrtfn != NULL_TREE)
8272 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8273 }
8274
8275 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8276 if (flag_unsafe_math_optimizations)
8277 {
8278 const REAL_VALUE_TYPE dconstroot
8279 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8280
8281 if (real_equal (&c, &dconstroot))
8282 {
8283 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8284 if (cbrtfn != NULL_TREE)
8285 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8286 }
8287 }
8288
8289 /* Check for an integer exponent. */
8290 n = real_to_integer (&c);
8291 real_from_integer (&cint, VOIDmode, n, SIGNED);
8292 if (real_identical (&c, &cint))
8293 {
8294 /* Attempt to evaluate pow at compile-time, unless this should
8295 raise an exception. */
8296 if (TREE_CODE (arg0) == REAL_CST
8297 && !TREE_OVERFLOW (arg0)
8298 && (n > 0
8299 || (!flag_trapping_math && !flag_errno_math)
8300 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8301 {
8302 REAL_VALUE_TYPE x;
8303 bool inexact;
8304
8305 x = TREE_REAL_CST (arg0);
8306 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8307 if (flag_unsafe_math_optimizations || !inexact)
8308 return build_real (type, x);
8309 }
8310
8311 /* Strip sign ops from even integer powers. */
8312 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8313 {
8314 tree narg0 = fold_strip_sign_ops (arg0);
8315 if (narg0)
8316 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8317 }
8318 }
8319 }
8320
8321 if (flag_unsafe_math_optimizations)
8322 {
8323 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8324
8325 /* Optimize pow(expN(x),y) = expN(x*y). */
8326 if (BUILTIN_EXPONENT_P (fcode))
8327 {
8328 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8329 tree arg = CALL_EXPR_ARG (arg0, 0);
8330 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8331 return build_call_expr_loc (loc, expfn, 1, arg);
8332 }
8333
8334 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8335 if (BUILTIN_SQRT_P (fcode))
8336 {
8337 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8338 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8339 build_real (type, dconsthalf));
8340 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8341 }
8342
8343 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8344 if (BUILTIN_CBRT_P (fcode))
8345 {
8346 tree arg = CALL_EXPR_ARG (arg0, 0);
8347 if (tree_expr_nonnegative_p (arg))
8348 {
8349 tree c = build_real_truncate (type, dconst_third ());
8350 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
8351 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8352 }
8353 }
8354
8355 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8356 if (fcode == BUILT_IN_POW
8357 || fcode == BUILT_IN_POWF
8358 || fcode == BUILT_IN_POWL)
8359 {
8360 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8361 if (tree_expr_nonnegative_p (arg00))
8362 {
8363 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8364 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8365 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8366 }
8367 }
8368 }
8369
8370 return NULL_TREE;
8371 }
8372
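/* A few of the pow rewrites above, in source form:

     pow (x, 0.0)   -> 1.0           pow (x, 1.0)  -> x
     pow (x, -1.0)  -> 1.0 / x       pow (x, 0.5)  -> sqrt (x)   (unsafe)
     pow (exp (x), y)     -> exp (x * y)                         (unsafe)
     pow (sqrt (x), y)    -> pow (x, y * 0.5)                    (unsafe)
     pow (pow (x, y), z)  -> pow (x, y * z)   if x >= 0          (unsafe)  */
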
8373 /* Fold a builtin function call to powi, powif, or powil with arguments
8374    ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
8375 static tree
8376 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8377 tree arg0, tree arg1, tree type)
8378 {
8379 if (!validate_arg (arg0, REAL_TYPE)
8380 || !validate_arg (arg1, INTEGER_TYPE))
8381 return NULL_TREE;
8382
8383   /* Optimize powi(1.0,y) = 1.0.  */
8384 if (real_onep (arg0))
8385 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8386
8387 if (tree_fits_shwi_p (arg1))
8388 {
8389 HOST_WIDE_INT c = tree_to_shwi (arg1);
8390
8391 /* Evaluate powi at compile-time. */
8392 if (TREE_CODE (arg0) == REAL_CST
8393 && !TREE_OVERFLOW (arg0))
8394 {
8395 REAL_VALUE_TYPE x;
8396 x = TREE_REAL_CST (arg0);
8397 real_powi (&x, TYPE_MODE (type), &x, c);
8398 return build_real (type, x);
8399 }
8400
8401       /* Optimize powi(x,0) = 1.0.  */
8402 if (c == 0)
8403 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8404 arg0);
8405
8406       /* Optimize powi(x,1) = x.  */
8407 if (c == 1)
8408 return arg0;
8409
8410       /* Optimize powi(x,-1) = 1.0/x.  */
8411 if (c == -1)
8412 return fold_build2_loc (loc, RDIV_EXPR, type,
8413 build_real (type, dconst1), arg0);
8414 }
8415
8416 return NULL_TREE;
8417 }
8418
8419 /* A subroutine of fold_builtin to fold the various exponent
8420 functions. Return NULL_TREE if no simplification can be made.
8421 FUNC is the corresponding MPFR exponent function. */
8422
8423 static tree
8424 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8425 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8426 {
8427 if (validate_arg (arg, REAL_TYPE))
8428 {
8429 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8430 tree res;
8431
8432 /* Calculate the result when the argument is a constant. */
8433 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8434 return res;
8435
8436 /* Optimize expN(logN(x)) = x. */
8437 if (flag_unsafe_math_optimizations)
8438 {
8439 const enum built_in_function fcode = builtin_mathfn_code (arg);
8440
8441 if ((func == mpfr_exp
8442 && (fcode == BUILT_IN_LOG
8443 || fcode == BUILT_IN_LOGF
8444 || fcode == BUILT_IN_LOGL))
8445 || (func == mpfr_exp2
8446 && (fcode == BUILT_IN_LOG2
8447 || fcode == BUILT_IN_LOG2F
8448 || fcode == BUILT_IN_LOG2L))
8449 || (func == mpfr_exp10
8450 && (fcode == BUILT_IN_LOG10
8451 || fcode == BUILT_IN_LOG10F
8452 || fcode == BUILT_IN_LOG10L)))
8453 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8454 }
8455 }
8456
8457 return NULL_TREE;
8458 }
8459
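/* The expN/logN cancellation, per base (unsafe math only):

     exp (log (x))      -> x
     exp2 (log2 (x))    -> x
     exp10 (log10 (x))  -> x  */
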
8460 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8461 arguments to the call, and TYPE is its return type.
8462 Return NULL_TREE if no simplification can be made. */
8463
8464 static tree
8465 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8466 {
8467 if (!validate_arg (arg1, POINTER_TYPE)
8468 || !validate_arg (arg2, INTEGER_TYPE)
8469 || !validate_arg (len, INTEGER_TYPE))
8470 return NULL_TREE;
8471 else
8472 {
8473 const char *p1;
8474
8475 if (TREE_CODE (arg2) != INTEGER_CST
8476 || !tree_fits_uhwi_p (len))
8477 return NULL_TREE;
8478
8479 p1 = c_getstr (arg1);
8480 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8481 {
8482 char c;
8483 const char *r;
8484 tree tem;
8485
8486 if (target_char_cast (arg2, &c))
8487 return NULL_TREE;
8488
8489 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8490
8491 if (r == NULL)
8492 return build_int_cst (TREE_TYPE (arg1), 0);
8493
8494 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8495 return fold_convert_loc (loc, type, tem);
8496 }
8497 return NULL_TREE;
8498 }
8499 }
8500
8501 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8502 Return NULL_TREE if no simplification can be made. */
8503
8504 static tree
8505 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8506 {
8507 const char *p1, *p2;
8508
8509 if (!validate_arg (arg1, POINTER_TYPE)
8510 || !validate_arg (arg2, POINTER_TYPE)
8511 || !validate_arg (len, INTEGER_TYPE))
8512 return NULL_TREE;
8513
8514 /* If the LEN parameter is zero, return zero. */
8515 if (integer_zerop (len))
8516 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8517 arg1, arg2);
8518
8519 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8520 if (operand_equal_p (arg1, arg2, 0))
8521 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8522
8523 p1 = c_getstr (arg1);
8524 p2 = c_getstr (arg2);
8525
8526 /* If all arguments are constant, and the value of len is not greater
8527 than the lengths of arg1 and arg2, evaluate at compile-time. */
8528 if (tree_fits_uhwi_p (len) && p1 && p2
8529 && compare_tree_int (len, strlen (p1) + 1) <= 0
8530 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8531 {
8532 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8533
8534 if (r > 0)
8535 return integer_one_node;
8536 else if (r < 0)
8537 return integer_minus_one_node;
8538 else
8539 return integer_zero_node;
8540 }
8541
8542 /* If len parameter is one, return an expression corresponding to
8543      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8544 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8545 {
8546 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8547 tree cst_uchar_ptr_node
8548 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8549
8550 tree ind1
8551 = fold_convert_loc (loc, integer_type_node,
8552 build1 (INDIRECT_REF, cst_uchar_node,
8553 fold_convert_loc (loc,
8554 cst_uchar_ptr_node,
8555 arg1)));
8556 tree ind2
8557 = fold_convert_loc (loc, integer_type_node,
8558 build1 (INDIRECT_REF, cst_uchar_node,
8559 fold_convert_loc (loc,
8560 cst_uchar_ptr_node,
8561 arg2)));
8562 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8563 }
8564
8565 return NULL_TREE;
8566 }
8567
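/* The len == 1 case above open-codes the byte compare, turning

     memcmp (p, q, 1)

   into

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   and so avoids a library call for a single byte.  */
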
8568 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8569 Return NULL_TREE if no simplification can be made. */
8570
8571 static tree
8572 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8573 {
8574 const char *p1, *p2;
8575
8576 if (!validate_arg (arg1, POINTER_TYPE)
8577 || !validate_arg (arg2, POINTER_TYPE))
8578 return NULL_TREE;
8579
8580 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8581 if (operand_equal_p (arg1, arg2, 0))
8582 return integer_zero_node;
8583
8584 p1 = c_getstr (arg1);
8585 p2 = c_getstr (arg2);
8586
8587 if (p1 && p2)
8588 {
8589 const int i = strcmp (p1, p2);
8590 if (i < 0)
8591 return integer_minus_one_node;
8592 else if (i > 0)
8593 return integer_one_node;
8594 else
8595 return integer_zero_node;
8596 }
8597
8598 /* If the second arg is "", return *(const unsigned char*)arg1. */
8599 if (p2 && *p2 == '\0')
8600 {
8601 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8602 tree cst_uchar_ptr_node
8603 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8604
8605 return fold_convert_loc (loc, integer_type_node,
8606 build1 (INDIRECT_REF, cst_uchar_node,
8607 fold_convert_loc (loc,
8608 cst_uchar_ptr_node,
8609 arg1)));
8610 }
8611
8612 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8613 if (p1 && *p1 == '\0')
8614 {
8615 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8616 tree cst_uchar_ptr_node
8617 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8618
8619 tree temp
8620 = fold_convert_loc (loc, integer_type_node,
8621 build1 (INDIRECT_REF, cst_uchar_node,
8622 fold_convert_loc (loc,
8623 cst_uchar_ptr_node,
8624 arg2)));
8625 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8626 }
8627
8628 return NULL_TREE;
8629 }
8630
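/* strcmp folds above, e.g.:

     strcmp (s, s)   -> 0
     strcmp (s, "")  -> *(const unsigned char *) s
     strcmp ("", s)  -> -*(const unsigned char *) s  */
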
8631 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8632 Return NULL_TREE if no simplification can be made. */
8633
8634 static tree
8635 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8636 {
8637 const char *p1, *p2;
8638
8639 if (!validate_arg (arg1, POINTER_TYPE)
8640 || !validate_arg (arg2, POINTER_TYPE)
8641 || !validate_arg (len, INTEGER_TYPE))
8642 return NULL_TREE;
8643
8644 /* If the LEN parameter is zero, return zero. */
8645 if (integer_zerop (len))
8646 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8647 arg1, arg2);
8648
8649 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8650 if (operand_equal_p (arg1, arg2, 0))
8651 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8652
8653 p1 = c_getstr (arg1);
8654 p2 = c_getstr (arg2);
8655
8656 if (tree_fits_uhwi_p (len) && p1 && p2)
8657 {
8658 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8659 if (i > 0)
8660 return integer_one_node;
8661 else if (i < 0)
8662 return integer_minus_one_node;
8663 else
8664 return integer_zero_node;
8665 }
8666
8667 /* If the second arg is "", and the length is greater than zero,
8668 return *(const unsigned char*)arg1. */
8669 if (p2 && *p2 == '\0'
8670 && TREE_CODE (len) == INTEGER_CST
8671 && tree_int_cst_sgn (len) == 1)
8672 {
8673 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8674 tree cst_uchar_ptr_node
8675 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8676
8677 return fold_convert_loc (loc, integer_type_node,
8678 build1 (INDIRECT_REF, cst_uchar_node,
8679 fold_convert_loc (loc,
8680 cst_uchar_ptr_node,
8681 arg1)));
8682 }
8683
8684 /* If the first arg is "", and the length is greater than zero,
8685 return -*(const unsigned char*)arg2. */
8686 if (p1 && *p1 == '\0'
8687 && TREE_CODE (len) == INTEGER_CST
8688 && tree_int_cst_sgn (len) == 1)
8689 {
8690 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8691 tree cst_uchar_ptr_node
8692 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8693
8694 tree temp = fold_convert_loc (loc, integer_type_node,
8695 build1 (INDIRECT_REF, cst_uchar_node,
8696 fold_convert_loc (loc,
8697 cst_uchar_ptr_node,
8698 arg2)));
8699 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8700 }
8701
8702 /* If the len parameter is one, return an expression corresponding to
8703 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8704 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8705 {
8706 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8707 tree cst_uchar_ptr_node
8708 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8709
8710 tree ind1 = fold_convert_loc (loc, integer_type_node,
8711 build1 (INDIRECT_REF, cst_uchar_node,
8712 fold_convert_loc (loc,
8713 cst_uchar_ptr_node,
8714 arg1)));
8715 tree ind2 = fold_convert_loc (loc, integer_type_node,
8716 build1 (INDIRECT_REF, cst_uchar_node,
8717 fold_convert_loc (loc,
8718 cst_uchar_ptr_node,
8719 arg2)));
8720 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8721 }
8722
8723 return NULL_TREE;
8724 }
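
/* An illustrative, build-excluded sketch (not part of GCC) of the
   identity behind the len == 1 fold above: for a single character,
   the library result and the unsigned-char difference agree in sign.
   The helper name is invented for the example.  */
#if 0
#include <assert.h>
#include <string.h>

static void
strncmp_len1_example (const char *s1, const char *s2)
{
  int lib = strncmp (s1, s2, 1);
  int diff = *(const unsigned char *) s1 - *(const unsigned char *) s2;
  assert ((lib == 0) == (diff == 0));
  assert ((lib < 0) == (diff < 0));
}
#endif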
8725
8726 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8727 ARG. Return NULL_TREE if no simplification can be made. */
8728
8729 static tree
8730 fold_builtin_signbit (location_t loc, tree arg, tree type)
8731 {
8732 if (!validate_arg (arg, REAL_TYPE))
8733 return NULL_TREE;
8734
8735 /* If ARG is a compile-time constant, determine the result. */
8736 if (TREE_CODE (arg) == REAL_CST
8737 && !TREE_OVERFLOW (arg))
8738 {
8739 REAL_VALUE_TYPE c;
8740
8741 c = TREE_REAL_CST (arg);
8742 return (REAL_VALUE_NEGATIVE (c)
8743 ? build_one_cst (type)
8744 : build_zero_cst (type));
8745 }
8746
8747 /* If ARG is non-negative, the result is always zero. */
8748 if (tree_expr_nonnegative_p (arg))
8749 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8750
8751 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8752 if (!HONOR_SIGNED_ZEROS (arg))
8753 return fold_convert (type,
8754 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8755 build_real (TREE_TYPE (arg), dconst0)));
8756
8757 return NULL_TREE;
8758 }
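
/* An illustrative, build-excluded sketch (not part of GCC) of why the
   "arg < 0.0" rewrite above is restricted to formats without signed
   zeros: signbit sees the sign of -0.0, the comparison does not.  */
#if 0
#include <assert.h>
#include <math.h>

static void
signbit_vs_compare (void)
{
  assert (signbit (-0.0) != 0);  /* The sign bit is set ...  */
  assert (!(-0.0 < 0.0));        /* ... but -0.0 < 0.0 is false.  */
}
#endif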
8759
8760 /* Fold function call to builtin copysign, copysignf or copysignl with
8761 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8762 be made. */
8763
8764 static tree
8765 fold_builtin_copysign (location_t loc, tree fndecl,
8766 tree arg1, tree arg2, tree type)
8767 {
8768 tree tem;
8769
8770 if (!validate_arg (arg1, REAL_TYPE)
8771 || !validate_arg (arg2, REAL_TYPE))
8772 return NULL_TREE;
8773
8774 /* copysign(X,X) is X. */
8775 if (operand_equal_p (arg1, arg2, 0))
8776 return fold_convert_loc (loc, type, arg1);
8777
8778 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8779 if (TREE_CODE (arg1) == REAL_CST
8780 && TREE_CODE (arg2) == REAL_CST
8781 && !TREE_OVERFLOW (arg1)
8782 && !TREE_OVERFLOW (arg2))
8783 {
8784 REAL_VALUE_TYPE c1, c2;
8785
8786 c1 = TREE_REAL_CST (arg1);
8787 c2 = TREE_REAL_CST (arg2);
8788 /* c1.sign := c2.sign. */
8789 real_copysign (&c1, &c2);
8790 return build_real (type, c1);
8791 }
8792
8793 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8794 Remember to evaluate Y for side-effects. */
8795 if (tree_expr_nonnegative_p (arg2))
8796 return omit_one_operand_loc (loc, type,
8797 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8798 arg2);
8799
8800 /* Strip sign changing operations for the first argument. */
8801 tem = fold_strip_sign_ops (arg1);
8802 if (tem)
8803 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8804
8805 return NULL_TREE;
8806 }
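
/* An illustrative, build-excluded sketch (not part of GCC) of the
   semantics the constant fold above reproduces: the result takes arg1's
   magnitude and arg2's sign, and degenerates to fabs when the sign
   source is non-negative.  */
#if 0
#include <assert.h>
#include <math.h>

static void
copysign_example (void)
{
  assert (copysign (3.0, -1.0) == -3.0);
  assert (copysign (-3.0, 1.0) == 3.0);
  assert (copysign (-3.0, 2.0) == fabs (-3.0));
}
#endif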
8807
8808 /* Fold a call to builtin isascii with argument ARG. */
8809
8810 static tree
8811 fold_builtin_isascii (location_t loc, tree arg)
8812 {
8813 if (!validate_arg (arg, INTEGER_TYPE))
8814 return NULL_TREE;
8815 else
8816 {
8817 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8818 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8819 build_int_cst (integer_type_node,
8820 ~ (unsigned HOST_WIDE_INT) 0x7f));
8821 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8822 arg, integer_zero_node);
8823 }
8824 }
8825
8826 /* Fold a call to builtin toascii with argument ARG. */
8827
8828 static tree
8829 fold_builtin_toascii (location_t loc, tree arg)
8830 {
8831 if (!validate_arg (arg, INTEGER_TYPE))
8832 return NULL_TREE;
8833
8834 /* Transform toascii(c) -> (c & 0x7f). */
8835 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8836 build_int_cst (integer_type_node, 0x7f));
8837 }
8838
8839 /* Fold a call to builtin isdigit with argument ARG. */
8840
8841 static tree
8842 fold_builtin_isdigit (location_t loc, tree arg)
8843 {
8844 if (!validate_arg (arg, INTEGER_TYPE))
8845 return NULL_TREE;
8846 else
8847 {
8848 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8849 /* According to the C standard, isdigit is unaffected by locale.
8850 However, it definitely is affected by the target character set. */
8851 unsigned HOST_WIDE_INT target_digit0
8852 = lang_hooks.to_target_charset ('0');
8853
8854 if (target_digit0 == 0)
8855 return NULL_TREE;
8856
8857 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8858 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8859 build_int_cst (unsigned_type_node, target_digit0));
8860 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8861 build_int_cst (unsigned_type_node, 9));
8862 }
8863 }
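
/* An illustrative, build-excluded sketch (not part of GCC) of the
   single-comparison form built above: subtracting '0' in unsigned
   arithmetic wraps negative and below-'0' inputs to huge values, so one
   <= test covers the whole digit range.  The helper name is invented.  */
#if 0
static int
isdigit_folded (int c)
{
  return (unsigned) c - '0' <= 9;
}

/* isdigit_folded ('7') == 1, isdigit_folded ('a') == 0, and a negative
   argument wraps to a huge unsigned value, so isdigit_folded (-1) == 0.  */
#endif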
8864
8865 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8866
8867 static tree
8868 fold_builtin_fabs (location_t loc, tree arg, tree type)
8869 {
8870 if (!validate_arg (arg, REAL_TYPE))
8871 return NULL_TREE;
8872
8873 arg = fold_convert_loc (loc, type, arg);
8874 if (TREE_CODE (arg) == REAL_CST)
8875 return fold_abs_const (arg, type);
8876 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8877 }
8878
8879 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8880
8881 static tree
8882 fold_builtin_abs (location_t loc, tree arg, tree type)
8883 {
8884 if (!validate_arg (arg, INTEGER_TYPE))
8885 return NULL_TREE;
8886
8887 arg = fold_convert_loc (loc, type, arg);
8888 if (TREE_CODE (arg) == INTEGER_CST)
8889 return fold_abs_const (arg, type);
8890 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8891 }
8892
8893 /* Fold an fma operation with arguments ARG[012]. */
8894
8895 tree
8896 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8897 tree type, tree arg0, tree arg1, tree arg2)
8898 {
8899 if (TREE_CODE (arg0) == REAL_CST
8900 && TREE_CODE (arg1) == REAL_CST
8901 && TREE_CODE (arg2) == REAL_CST)
8902 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8903
8904 return NULL_TREE;
8905 }
8906
8907 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8908
8909 static tree
8910 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8911 {
8912 if (validate_arg (arg0, REAL_TYPE)
8913 && validate_arg (arg1, REAL_TYPE)
8914 && validate_arg (arg2, REAL_TYPE))
8915 {
8916 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8917 if (tem)
8918 return tem;
8919
8920 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8921 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8922 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8923 }
8924 return NULL_TREE;
8925 }
8926
8927 /* Fold a call to builtin fmin or fmax. */
8928
8929 static tree
8930 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8931 tree type, bool max)
8932 {
8933 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8934 {
8935 /* Calculate the result when the argument is a constant. */
8936 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8937
8938 if (res)
8939 return res;
8940
8941 /* If either argument is NaN, return the other one. Avoid the
8942 transformation if we get (and honor) a signalling NaN. Using
8943 omit_one_operand() ensures we create a non-lvalue. */
8944 if (TREE_CODE (arg0) == REAL_CST
8945 && real_isnan (&TREE_REAL_CST (arg0))
8946 && (! HONOR_SNANS (arg0)
8947 || ! TREE_REAL_CST (arg0).signalling))
8948 return omit_one_operand_loc (loc, type, arg1, arg0);
8949 if (TREE_CODE (arg1) == REAL_CST
8950 && real_isnan (&TREE_REAL_CST (arg1))
8951 && (! HONOR_SNANS (arg1)
8952 || ! TREE_REAL_CST (arg1).signalling))
8953 return omit_one_operand_loc (loc, type, arg0, arg1);
8954
8955 /* Transform fmin/fmax(x,x) -> x. */
8956 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8957 return omit_one_operand_loc (loc, type, arg0, arg1);
8958
8959 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8960 functions to return the numeric arg if the other one is NaN.
8961 These tree codes don't honor that, so only transform if
8962 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8963 handled, so we don't have to worry about it either. */
8964 if (flag_finite_math_only)
8965 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8966 fold_convert_loc (loc, type, arg0),
8967 fold_convert_loc (loc, type, arg1));
8968 }
8969 return NULL_TREE;
8970 }
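
/* An illustrative, build-excluded sketch (not part of GCC) of why the
   MIN_EXPR/MAX_EXPR rewrite above requires -ffinite-math-only: C99
   fmax must ignore a NaN operand, while a comparison-based maximum can
   return it.  */
#if 0
#include <assert.h>
#include <math.h>

static void
fmax_nan_example (void)
{
  double x = NAN;
  assert (fmax (2.0, x) == 2.0);        /* C99: the NaN is ignored.  */
  assert (isnan (2.0 > x ? 2.0 : x));   /* A bare max picks the NaN.  */
}
#endif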
8971
8972 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8973
8974 static tree
8975 fold_builtin_carg (location_t loc, tree arg, tree type)
8976 {
8977 if (validate_arg (arg, COMPLEX_TYPE)
8978 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8979 {
8980 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8981
8982 if (atan2_fn)
8983 {
8984 tree new_arg = builtin_save_expr (arg);
8985 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8986 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8987 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8988 }
8989 }
8990
8991 return NULL_TREE;
8992 }
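
/* An illustrative, build-excluded sketch (not part of GCC): the rewrite
   above matches the library definition carg (a + b*I) == atan2 (b, a);
   note the imaginary part is the first atan2 argument.  */
#if 0
#include <assert.h>
#include <complex.h>
#include <math.h>

static void
carg_example (void)
{
  double complex z = -1.0 + 0.0 * I;
  assert (carg (z) == atan2 (0.0, -1.0));  /* Both are pi.  */
}
#endif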
8993
8994 /* Fold a call to builtin logb/ilogb. */
8995
8996 static tree
8997 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8998 {
8999 if (! validate_arg (arg, REAL_TYPE))
9000 return NULL_TREE;
9001
9002 STRIP_NOPS (arg);
9003
9004 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9005 {
9006 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9007
9008 switch (value->cl)
9009 {
9010 case rvc_nan:
9011 case rvc_inf:
9012 /* If arg is Inf or NaN and we're logb, return it. */
9013 if (TREE_CODE (rettype) == REAL_TYPE)
9014 {
9015 /* For logb(-Inf) we have to return +Inf. */
9016 if (real_isinf (value) && real_isneg (value))
9017 {
9018 REAL_VALUE_TYPE tem;
9019 real_inf (&tem);
9020 return build_real (rettype, tem);
9021 }
9022 return fold_convert_loc (loc, rettype, arg);
9023 }
9024 /* Fall through... */
9025 case rvc_zero:
9026 /* Zero may set errno and/or raise an exception for logb; also,
9027 for ilogb we don't know FP_ILOGB0. */
9028 return NULL_TREE;
9029 case rvc_normal:
9030 /* For normal numbers, proceed iff radix == 2. In GCC,
9031 normalized significands are in the range [0.5, 1.0). We
9032 want the exponent as if they were [1.0, 2.0) so get the
9033 exponent and subtract 1. */
9034 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9035 return fold_convert_loc (loc, rettype,
9036 build_int_cst (integer_type_node,
9037 REAL_EXP (value) - 1));
9038 break;
9039 }
9040 }
9041
9042 return NULL_TREE;
9043 }
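
/* An illustrative, build-excluded sketch (not part of GCC) of the
   exponent convention handled above: logb is defined for significands
   in [1.0, 2.0), one below GCC's internal [0.5, 1.0) normalization,
   hence the REAL_EXP minus one.  */
#if 0
#include <assert.h>
#include <math.h>

static void
logb_example (void)
{
  assert (logb (8.0) == 3.0);    /* 8.0 == 1.0 * 2^3.  */
  assert (logb (0.75) == -1.0);  /* 0.75 == 1.5 * 2^-1.  */
}
#endif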
9044
9045 /* Fold a call to builtin significand, if radix == 2. */
9046
9047 static tree
9048 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9049 {
9050 if (! validate_arg (arg, REAL_TYPE))
9051 return NULL_TREE;
9052
9053 STRIP_NOPS (arg);
9054
9055 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9056 {
9057 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9058
9059 switch (value->cl)
9060 {
9061 case rvc_zero:
9062 case rvc_nan:
9063 case rvc_inf:
9064 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9065 return fold_convert_loc (loc, rettype, arg);
9066 case rvc_normal:
9067 /* For normal numbers, proceed iff radix == 2. */
9068 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9069 {
9070 REAL_VALUE_TYPE result = *value;
9071 /* In GCC, normalized significands are in the range [0.5,
9072 1.0). We want them to be [1.0, 2.0) so set the
9073 exponent to 1. */
9074 SET_REAL_EXP (&result, 1);
9075 return build_real (rettype, result);
9076 }
9077 break;
9078 }
9079 }
9080
9081 return NULL_TREE;
9082 }
9083
9084 /* Fold a call to builtin frexp. We can assume the base is 2. */
9085
9086 static tree
9087 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9088 {
9089 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9090 return NULL_TREE;
9091
9092 STRIP_NOPS (arg0);
9093
9094 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9095 return NULL_TREE;
9096
9097 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9098
9099 /* Proceed if a valid pointer type was passed in. */
9100 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9101 {
9102 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9103 tree frac, exp;
9104
9105 switch (value->cl)
9106 {
9107 case rvc_zero:
9108 /* For +-0, return (*exp = 0, +-0). */
9109 exp = integer_zero_node;
9110 frac = arg0;
9111 break;
9112 case rvc_nan:
9113 case rvc_inf:
9114 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9115 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9116 case rvc_normal:
9117 {
9118 /* Since the frexp function always expects base 2, and in
9119 GCC normalized significands are already in the range
9120 [0.5, 1.0), we have exactly what frexp wants. */
9121 REAL_VALUE_TYPE frac_rvt = *value;
9122 SET_REAL_EXP (&frac_rvt, 0);
9123 frac = build_real (rettype, frac_rvt);
9124 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9125 }
9126 break;
9127 default:
9128 gcc_unreachable ();
9129 }
9130
9131 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9132 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9133 TREE_SIDE_EFFECTS (arg1) = 1;
9134 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9135 }
9136
9137 return NULL_TREE;
9138 }
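
/* An illustrative, build-excluded sketch (not part of GCC) of the
   contract the rvc_normal case above satisfies by zeroing the stored
   exponent: frexp yields a fraction in [0.5, 1.0) and the matching
   power of two.  */
#if 0
#include <assert.h>
#include <math.h>

static void
frexp_example (void)
{
  int e;
  double frac = frexp (24.0, &e);  /* 24.0 == 0.75 * 2^5.  */
  assert (frac == 0.75 && e == 5);
}
#endif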
9139
9140 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9141 then we can assume the base is two. If it's false, then we have to
9142 check the mode of the TYPE parameter in certain cases. */
9143
9144 static tree
9145 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9146 tree type, bool ldexp)
9147 {
9148 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9149 {
9150 STRIP_NOPS (arg0);
9151 STRIP_NOPS (arg1);
9152
9153 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9154 if (real_zerop (arg0) || integer_zerop (arg1)
9155 || (TREE_CODE (arg0) == REAL_CST
9156 && !real_isfinite (&TREE_REAL_CST (arg0))))
9157 return omit_one_operand_loc (loc, type, arg0, arg1);
9158
9159 /* If both arguments are constant, then try to evaluate it. */
9160 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9161 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9162 && tree_fits_shwi_p (arg1))
9163 {
9164 /* Bound the maximum adjustment to twice the range of the
9165 mode's valid exponents. Use abs to ensure the range is
9166 positive as a sanity check. */
9167 const long max_exp_adj = 2 *
9168 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9169 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9170
9171 /* Get the user-requested adjustment. */
9172 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9173
9174 /* The requested adjustment must be inside this range. This
9175 is a preliminary cap to avoid things like overflow; we
9176 may still fail to compute the result for other reasons. */
9177 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9178 {
9179 REAL_VALUE_TYPE initial_result;
9180
9181 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9182
9183 /* Ensure we didn't overflow. */
9184 if (! real_isinf (&initial_result))
9185 {
9186 const REAL_VALUE_TYPE trunc_result
9187 = real_value_truncate (TYPE_MODE (type), initial_result);
9188
9189 /* Only proceed if the target mode can hold the
9190 resulting value. */
9191 if (real_equal (&initial_result, &trunc_result))
9192 return build_real (type, trunc_result);
9193 }
9194 }
9195 }
9196 }
9197
9198 return NULL_TREE;
9199 }
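
/* An illustrative, build-excluded sketch (not part of GCC) of the
   scaling evaluated above; the fold only succeeds when the result, as
   here, is exact in the target mode.  */
#if 0
#include <assert.h>
#include <math.h>

static void
ldexp_example (void)
{
  assert (ldexp (0.75, 4) == 12.0);  /* 0.75 * 2^4.  */
  assert (ldexp (1.0, -1) == 0.5);   /* 1.0 * 2^-1.  */
}
#endif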
9200
9201 /* Fold a call to builtin modf. */
9202
9203 static tree
9204 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9205 {
9206 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9207 return NULL_TREE;
9208
9209 STRIP_NOPS (arg0);
9210
9211 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9212 return NULL_TREE;
9213
9214 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9215
9216 /* Proceed if a valid pointer type was passed in. */
9217 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9218 {
9219 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9220 REAL_VALUE_TYPE trunc, frac;
9221
9222 switch (value->cl)
9223 {
9224 case rvc_nan:
9225 case rvc_zero:
9226 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9227 trunc = frac = *value;
9228 break;
9229 case rvc_inf:
9230 /* For +-Inf, return (*arg1 = arg0, +-0). */
9231 frac = dconst0;
9232 frac.sign = value->sign;
9233 trunc = *value;
9234 break;
9235 case rvc_normal:
9236 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9237 real_trunc (&trunc, VOIDmode, value);
9238 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9239 /* If the original number was negative and already
9240 integral, then the fractional part is -0.0. */
9241 if (value->sign && frac.cl == rvc_zero)
9242 frac.sign = value->sign;
9243 break;
9244 }
9245
9246 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9247 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9248 build_real (rettype, trunc));
9249 TREE_SIDE_EFFECTS (arg1) = 1;
9250 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9251 build_real (rettype, frac));
9252 }
9253
9254 return NULL_TREE;
9255 }
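
/* An illustrative, build-excluded sketch (not part of GCC) of the split
   computed above: both parts carry the argument's sign, which is why
   the rvc_normal case patches -0.0 into the fraction for negative
   integral inputs.  */
#if 0
#include <assert.h>
#include <math.h>

static void
modf_example (void)
{
  double ipart;
  double frac = modf (-3.25, &ipart);
  assert (ipart == -3.0 && frac == -0.25);
}
#endif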
9256
9257 /* Given a location LOC, an interclass builtin function decl FNDECL
9258 and its single argument ARG, return a folded expression computing
9259 the same, or NULL_TREE if we either couldn't or didn't want to fold
9260 (the latter happens if there's an RTL instruction available). */
9261
9262 static tree
9263 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9264 {
9265 machine_mode mode;
9266
9267 if (!validate_arg (arg, REAL_TYPE))
9268 return NULL_TREE;
9269
9270 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9271 return NULL_TREE;
9272
9273 mode = TYPE_MODE (TREE_TYPE (arg));
9274
9275 /* If there is no optab, try generic code. */
9276 switch (DECL_FUNCTION_CODE (fndecl))
9277 {
9278 tree result;
9279
9280 CASE_FLT_FN (BUILT_IN_ISINF):
9281 {
9282 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9283 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9284 tree const type = TREE_TYPE (arg);
9285 REAL_VALUE_TYPE r;
9286 char buf[128];
9287
9288 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9289 real_from_string (&r, buf);
9290 result = build_call_expr (isgr_fn, 2,
9291 fold_build1_loc (loc, ABS_EXPR, type, arg),
9292 build_real (type, r));
9293 return result;
9294 }
9295 CASE_FLT_FN (BUILT_IN_FINITE):
9296 case BUILT_IN_ISFINITE:
9297 {
9298 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9299 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9300 tree const type = TREE_TYPE (arg);
9301 REAL_VALUE_TYPE r;
9302 char buf[128];
9303
9304 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9305 real_from_string (&r, buf);
9306 result = build_call_expr (isle_fn, 2,
9307 fold_build1_loc (loc, ABS_EXPR, type, arg),
9308 build_real (type, r));
9309 /*result = fold_build2_loc (loc, UNGT_EXPR,
9310 TREE_TYPE (TREE_TYPE (fndecl)),
9311 fold_build1_loc (loc, ABS_EXPR, type, arg),
9312 build_real (type, r));
9313 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9314 TREE_TYPE (TREE_TYPE (fndecl)),
9315 result);*/
9316 return result;
9317 }
9318 case BUILT_IN_ISNORMAL:
9319 {
9320 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9321 islessequal(fabs(x),DBL_MAX). */
9322 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9323 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9324 tree const type = TREE_TYPE (arg);
9325 REAL_VALUE_TYPE rmax, rmin;
9326 char buf[128];
9327
9328 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9329 real_from_string (&rmax, buf);
9330 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9331 real_from_string (&rmin, buf);
9332 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9333 result = build_call_expr (isle_fn, 2, arg,
9334 build_real (type, rmax));
9335 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9336 build_call_expr (isge_fn, 2, arg,
9337 build_real (type, rmin)));
9338 return result;
9339 }
9340 default:
9341 break;
9342 }
9343
9344 return NULL_TREE;
9345 }
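
/* An illustrative, build-excluded sketch (not part of GCC) of the
   generic isinf expansion above: only an infinity compares greater
   than the largest finite value, and isgreater is quietly false on
   unordered (NaN) operands.  The helper name is invented.  */
#if 0
#include <float.h>
#include <math.h>

static int
isinf_folded (double x)
{
  return isgreater (fabs (x), DBL_MAX);
}

/* isinf_folded (INFINITY) == 1; isinf_folded (NAN) == 0 because the
   comparison is unordered.  */
#endif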
9346
9347 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9348 ARG is the argument for the call. */
9349
9350 static tree
9351 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9352 {
9353 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9354 REAL_VALUE_TYPE r;
9355
9356 if (!validate_arg (arg, REAL_TYPE))
9357 return NULL_TREE;
9358
9359 switch (builtin_index)
9360 {
9361 case BUILT_IN_ISINF:
9362 if (!HONOR_INFINITIES (arg))
9363 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9364
9365 if (TREE_CODE (arg) == REAL_CST)
9366 {
9367 r = TREE_REAL_CST (arg);
9368 if (real_isinf (&r))
9369 return real_compare (GT_EXPR, &r, &dconst0)
9370 ? integer_one_node : integer_minus_one_node;
9371 else
9372 return integer_zero_node;
9373 }
9374
9375 return NULL_TREE;
9376
9377 case BUILT_IN_ISINF_SIGN:
9378 {
9379 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9380 /* In a boolean context, GCC will fold the inner COND_EXPR to
9381 1. So e.g. "if (isinf_sign(x))" would be folded to just
9382 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9383 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9384 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9385 tree tmp = NULL_TREE;
9386
9387 arg = builtin_save_expr (arg);
9388
9389 if (signbit_fn && isinf_fn)
9390 {
9391 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9392 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9393
9394 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9395 signbit_call, integer_zero_node);
9396 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9397 isinf_call, integer_zero_node);
9398
9399 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9400 integer_minus_one_node, integer_one_node);
9401 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9402 isinf_call, tmp,
9403 integer_zero_node);
9404 }
9405
9406 return tmp;
9407 }
9408
9409 case BUILT_IN_ISFINITE:
9410 if (!HONOR_NANS (arg)
9411 && !HONOR_INFINITIES (arg))
9412 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9413
9414 if (TREE_CODE (arg) == REAL_CST)
9415 {
9416 r = TREE_REAL_CST (arg);
9417 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9418 }
9419
9420 return NULL_TREE;
9421
9422 case BUILT_IN_ISNAN:
9423 if (!HONOR_NANS (arg))
9424 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9425
9426 if (TREE_CODE (arg) == REAL_CST)
9427 {
9428 r = TREE_REAL_CST (arg);
9429 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9430 }
9431
9432 arg = builtin_save_expr (arg);
9433 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9434
9435 default:
9436 gcc_unreachable ();
9437 }
9438 }
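
/* An illustrative, build-excluded sketch (not part of GCC) of the
   isinf_sign expansion built above.  */
#if 0
#include <assert.h>
#include <math.h>

static void
isinf_sign_example (void)
{
  assert (__builtin_isinf_sign (-INFINITY) == -1);
  assert (__builtin_isinf_sign (INFINITY) == 1);
  assert (__builtin_isinf_sign (NAN) == 0);
  assert (__builtin_isinf_sign (42.0) == 0);
}
#endif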
9439
9440 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9441 This builtin will generate code to return the appropriate floating
9442 point classification depending on the value of the floating point
9443 number passed in. The possible return values must be supplied as
9444 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9445 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9446 one floating point argument, which is "type generic". */
9447
9448 static tree
9449 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9450 {
9451 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9452 arg, type, res, tmp;
9453 machine_mode mode;
9454 REAL_VALUE_TYPE r;
9455 char buf[128];
9456
9457 /* Verify the required arguments in the original call. */
9458 if (nargs != 6
9459 || !validate_arg (args[0], INTEGER_TYPE)
9460 || !validate_arg (args[1], INTEGER_TYPE)
9461 || !validate_arg (args[2], INTEGER_TYPE)
9462 || !validate_arg (args[3], INTEGER_TYPE)
9463 || !validate_arg (args[4], INTEGER_TYPE)
9464 || !validate_arg (args[5], REAL_TYPE))
9465 return NULL_TREE;
9466
9467 fp_nan = args[0];
9468 fp_infinite = args[1];
9469 fp_normal = args[2];
9470 fp_subnormal = args[3];
9471 fp_zero = args[4];
9472 arg = args[5];
9473 type = TREE_TYPE (arg);
9474 mode = TYPE_MODE (type);
9475 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9476
9477 /* fpclassify(x) ->
9478 isnan(x) ? FP_NAN :
9479 (fabs(x) == Inf ? FP_INFINITE :
9480 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9481 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9482
9483 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9484 build_real (type, dconst0));
9485 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9486 tmp, fp_zero, fp_subnormal);
9487
9488 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9489 real_from_string (&r, buf);
9490 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9491 arg, build_real (type, r));
9492 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9493
9494 if (HONOR_INFINITIES (mode))
9495 {
9496 real_inf (&r);
9497 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9498 build_real (type, r));
9499 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9500 fp_infinite, res);
9501 }
9502
9503 if (HONOR_NANS (mode))
9504 {
9505 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9506 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9507 }
9508
9509 return res;
9510 }
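
/* An illustrative, build-excluded sketch (not part of GCC) of how the
   type-generic builtin folded above is called: the five classification
   values lead, the tested operand comes last, matching the argument
   validation above.  The wrapper name is invented.  */
#if 0
#include <math.h>

static int
classify (double x)
{
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}

/* classify (0.0) == FP_ZERO, classify (1.0) == FP_NORMAL,
   classify (1.0 / 0.0) == FP_INFINITE.  */
#endif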
9511
9512 /* Fold a call to an unordered comparison function such as
9513 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9514 being called and ARG0 and ARG1 are the arguments for the call.
9515 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9516 the opposite of the desired result. UNORDERED_CODE is used
9517 for modes that can hold NaNs and ORDERED_CODE is used for
9518 the rest. */
9519
9520 static tree
9521 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9522 enum tree_code unordered_code,
9523 enum tree_code ordered_code)
9524 {
9525 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9526 enum tree_code code;
9527 tree type0, type1;
9528 enum tree_code code0, code1;
9529 tree cmp_type = NULL_TREE;
9530
9531 type0 = TREE_TYPE (arg0);
9532 type1 = TREE_TYPE (arg1);
9533
9534 code0 = TREE_CODE (type0);
9535 code1 = TREE_CODE (type1);
9536
9537 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9538 /* Choose the wider of two real types. */
9539 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9540 ? type0 : type1;
9541 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9542 cmp_type = type0;
9543 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9544 cmp_type = type1;
9545
9546 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9547 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9548
9549 if (unordered_code == UNORDERED_EXPR)
9550 {
9551 if (!HONOR_NANS (arg0))
9552 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9553 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9554 }
9555
9556 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9557 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9558 fold_build2_loc (loc, code, type, arg0, arg1));
9559 }
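
/* An illustrative, build-excluded sketch (not part of GCC) of the
   quiet-on-NaN behavior the inverted comparison above preserves:
   isgreater must simply be false, without raising FE_INVALID, when an
   operand is a NaN.  */
#if 0
#include <assert.h>
#include <math.h>

static void
isgreater_example (void)
{
  double x = NAN;
  assert (!isgreater (x, 1.0));  /* Unordered -> false, no exception.  */
  assert (!isgreater (1.0, x));
  assert (isgreater (2.0, 1.0));
}
#endif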
9560
9561 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9562 arithmetic if it can never overflow, or into internal functions that
9563 return both the result of the arithmetic and an overflow flag in
9564 a complex integer result, or some other check for overflow. */
9565
9566 static tree
9567 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9568 tree arg0, tree arg1, tree arg2)
9569 {
9570 enum internal_fn ifn = IFN_LAST;
9571 tree type = TREE_TYPE (TREE_TYPE (arg2));
9572 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9573 switch (fcode)
9574 {
9575 case BUILT_IN_ADD_OVERFLOW:
9576 case BUILT_IN_SADD_OVERFLOW:
9577 case BUILT_IN_SADDL_OVERFLOW:
9578 case BUILT_IN_SADDLL_OVERFLOW:
9579 case BUILT_IN_UADD_OVERFLOW:
9580 case BUILT_IN_UADDL_OVERFLOW:
9581 case BUILT_IN_UADDLL_OVERFLOW:
9582 ifn = IFN_ADD_OVERFLOW;
9583 break;
9584 case BUILT_IN_SUB_OVERFLOW:
9585 case BUILT_IN_SSUB_OVERFLOW:
9586 case BUILT_IN_SSUBL_OVERFLOW:
9587 case BUILT_IN_SSUBLL_OVERFLOW:
9588 case BUILT_IN_USUB_OVERFLOW:
9589 case BUILT_IN_USUBL_OVERFLOW:
9590 case BUILT_IN_USUBLL_OVERFLOW:
9591 ifn = IFN_SUB_OVERFLOW;
9592 break;
9593 case BUILT_IN_MUL_OVERFLOW:
9594 case BUILT_IN_SMUL_OVERFLOW:
9595 case BUILT_IN_SMULL_OVERFLOW:
9596 case BUILT_IN_SMULLL_OVERFLOW:
9597 case BUILT_IN_UMUL_OVERFLOW:
9598 case BUILT_IN_UMULL_OVERFLOW:
9599 case BUILT_IN_UMULLL_OVERFLOW:
9600 ifn = IFN_MUL_OVERFLOW;
9601 break;
9602 default:
9603 gcc_unreachable ();
9604 }
9605 tree ctype = build_complex_type (type);
9606 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9607 2, arg0, arg1);
9608 tree tgt = save_expr (call);
9609 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9610 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9611 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9612 tree store
9613 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9614 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9615 }
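
/* An illustrative, build-excluded sketch (not part of GCC) of the
   user-visible contract folded above into IFN_*_OVERFLOW: the result is
   stored through the third argument and the return value reports
   whether it wrapped.  */
#if 0
#include <assert.h>
#include <limits.h>

static void
add_overflow_example (void)
{
  int r;
  assert (!__builtin_add_overflow (1, 2, &r) && r == 3);
  assert (__builtin_add_overflow (INT_MAX, 1, &r));  /* Overflowed.  */
}
#endif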
9616
9617 /* Fold a call to built-in function FNDECL with 0 arguments.
9618 This function returns NULL_TREE if no simplification was possible. */
9619
9620 static tree
9621 fold_builtin_0 (location_t loc, tree fndecl)
9622 {
9623 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9624 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9625 switch (fcode)
9626 {
9627 CASE_FLT_FN (BUILT_IN_INF):
9628 case BUILT_IN_INFD32:
9629 case BUILT_IN_INFD64:
9630 case BUILT_IN_INFD128:
9631 return fold_builtin_inf (loc, type, true);
9632
9633 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9634 return fold_builtin_inf (loc, type, false);
9635
9636 case BUILT_IN_CLASSIFY_TYPE:
9637 return fold_builtin_classify_type (NULL_TREE);
9638
9639 default:
9640 break;
9641 }
9642 return NULL_TREE;
9643 }
9644
9645 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9646 This function returns NULL_TREE if no simplification was possible. */
9647
9648 static tree
9649 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9650 {
9651 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9653 switch (fcode)
9654 {
9655 case BUILT_IN_CONSTANT_P:
9656 {
9657 tree val = fold_builtin_constant_p (arg0);
9658
9659 /* Gimplification will pull the CALL_EXPR for the builtin out of
9660 an if condition. When not optimizing, we'll not CSE it back.
9661 To avoid regressions such as link errors, return false now. */
9662 if (!val && !optimize)
9663 val = integer_zero_node;
9664
9665 return val;
9666 }
9667
9668 case BUILT_IN_CLASSIFY_TYPE:
9669 return fold_builtin_classify_type (arg0);
9670
9671 case BUILT_IN_STRLEN:
9672 return fold_builtin_strlen (loc, type, arg0);
9673
9674 CASE_FLT_FN (BUILT_IN_FABS):
9675 case BUILT_IN_FABSD32:
9676 case BUILT_IN_FABSD64:
9677 case BUILT_IN_FABSD128:
9678 return fold_builtin_fabs (loc, arg0, type);
9679
9680 case BUILT_IN_ABS:
9681 case BUILT_IN_LABS:
9682 case BUILT_IN_LLABS:
9683 case BUILT_IN_IMAXABS:
9684 return fold_builtin_abs (loc, arg0, type);
9685
9686 CASE_FLT_FN (BUILT_IN_CONJ):
9687 if (validate_arg (arg0, COMPLEX_TYPE)
9688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9689 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9690 break;
9691
9692 CASE_FLT_FN (BUILT_IN_CREAL):
9693 if (validate_arg (arg0, COMPLEX_TYPE)
9694 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9695 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9696 break;
9697
9698 CASE_FLT_FN (BUILT_IN_CIMAG):
9699 if (validate_arg (arg0, COMPLEX_TYPE)
9700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9701 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9702 break;
9703
9704 CASE_FLT_FN (BUILT_IN_CCOS):
9705 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9706
9707 CASE_FLT_FN (BUILT_IN_CCOSH):
9708 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9709
9710 CASE_FLT_FN (BUILT_IN_CPROJ):
9711 return fold_builtin_cproj (loc, arg0, type);
9712
9713 CASE_FLT_FN (BUILT_IN_CSIN):
9714 if (validate_arg (arg0, COMPLEX_TYPE)
9715 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9716 return do_mpc_arg1 (arg0, type, mpc_sin);
9717 break;
9718
9719 CASE_FLT_FN (BUILT_IN_CSINH):
9720 if (validate_arg (arg0, COMPLEX_TYPE)
9721 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9722 return do_mpc_arg1 (arg0, type, mpc_sinh);
9723 break;
9724
9725 CASE_FLT_FN (BUILT_IN_CTAN):
9726 if (validate_arg (arg0, COMPLEX_TYPE)
9727 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9728 return do_mpc_arg1 (arg0, type, mpc_tan);
9729 break;
9730
9731 CASE_FLT_FN (BUILT_IN_CTANH):
9732 if (validate_arg (arg0, COMPLEX_TYPE)
9733 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9734 return do_mpc_arg1 (arg0, type, mpc_tanh);
9735 break;
9736
9737 CASE_FLT_FN (BUILT_IN_CLOG):
9738 if (validate_arg (arg0, COMPLEX_TYPE)
9739 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9740 return do_mpc_arg1 (arg0, type, mpc_log);
9741 break;
9742
9743 CASE_FLT_FN (BUILT_IN_CSQRT):
9744 if (validate_arg (arg0, COMPLEX_TYPE)
9745 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9746 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9747 break;
9748
9749 CASE_FLT_FN (BUILT_IN_CASIN):
9750 if (validate_arg (arg0, COMPLEX_TYPE)
9751 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9752 return do_mpc_arg1 (arg0, type, mpc_asin);
9753 break;
9754
9755 CASE_FLT_FN (BUILT_IN_CACOS):
9756 if (validate_arg (arg0, COMPLEX_TYPE)
9757 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9758 return do_mpc_arg1 (arg0, type, mpc_acos);
9759 break;
9760
9761 CASE_FLT_FN (BUILT_IN_CATAN):
9762 if (validate_arg (arg0, COMPLEX_TYPE)
9763 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9764 return do_mpc_arg1 (arg0, type, mpc_atan);
9765 break;
9766
9767 CASE_FLT_FN (BUILT_IN_CASINH):
9768 if (validate_arg (arg0, COMPLEX_TYPE)
9769 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9770 return do_mpc_arg1 (arg0, type, mpc_asinh);
9771 break;
9772
9773 CASE_FLT_FN (BUILT_IN_CACOSH):
9774 if (validate_arg (arg0, COMPLEX_TYPE)
9775 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9776 return do_mpc_arg1 (arg0, type, mpc_acosh);
9777 break;
9778
9779 CASE_FLT_FN (BUILT_IN_CATANH):
9780 if (validate_arg (arg0, COMPLEX_TYPE)
9781 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9782 return do_mpc_arg1 (arg0, type, mpc_atanh);
9783 break;
9784
9785 CASE_FLT_FN (BUILT_IN_CABS):
9786 return fold_builtin_cabs (loc, arg0, type, fndecl);
9787
9788 CASE_FLT_FN (BUILT_IN_CARG):
9789 return fold_builtin_carg (loc, arg0, type);
9790
9791 CASE_FLT_FN (BUILT_IN_SQRT):
9792 if (validate_arg (arg0, REAL_TYPE))
9793 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9794 break;
9795
9796 CASE_FLT_FN (BUILT_IN_CBRT):
9797 if (validate_arg (arg0, REAL_TYPE))
9798 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9799 break;
9800
9801 CASE_FLT_FN (BUILT_IN_ASIN):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9804 &dconstm1, &dconst1, true);
9805 break;
9806
9807 CASE_FLT_FN (BUILT_IN_ACOS):
9808 if (validate_arg (arg0, REAL_TYPE))
9809 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9810 &dconstm1, &dconst1, true);
9811 break;
9812
9813 CASE_FLT_FN (BUILT_IN_ATAN):
9814 if (validate_arg (arg0, REAL_TYPE))
9815 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9816 break;
9817
9818 CASE_FLT_FN (BUILT_IN_ASINH):
9819 if (validate_arg (arg0, REAL_TYPE))
9820 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9821 break;
9822
9823 CASE_FLT_FN (BUILT_IN_ACOSH):
9824 if (validate_arg (arg0, REAL_TYPE))
9825 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9826 &dconst1, NULL, true);
9827 break;
9828
9829 CASE_FLT_FN (BUILT_IN_ATANH):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9832 &dconstm1, &dconst1, false);
9833 break;
9834
9835 CASE_FLT_FN (BUILT_IN_SIN):
9836 if (validate_arg (arg0, REAL_TYPE))
9837 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9838 break;
9839
9840 CASE_FLT_FN (BUILT_IN_COS):
9841 return fold_builtin_cos (loc, arg0, type, fndecl);
9842
9843 CASE_FLT_FN (BUILT_IN_TAN):
9844 return fold_builtin_tan (arg0, type);
9845
9846 CASE_FLT_FN (BUILT_IN_CEXP):
9847 return fold_builtin_cexp (loc, arg0, type);
9848
9849 CASE_FLT_FN (BUILT_IN_CEXPI):
9850 if (validate_arg (arg0, REAL_TYPE))
9851 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9852 break;
9853
9854 CASE_FLT_FN (BUILT_IN_SINH):
9855 if (validate_arg (arg0, REAL_TYPE))
9856 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9857 break;
9858
9859 CASE_FLT_FN (BUILT_IN_COSH):
9860 return fold_builtin_cosh (loc, arg0, type, fndecl);
9861
9862 CASE_FLT_FN (BUILT_IN_TANH):
9863 if (validate_arg (arg0, REAL_TYPE))
9864 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9865 break;
9866
9867 CASE_FLT_FN (BUILT_IN_ERF):
9868 if (validate_arg (arg0, REAL_TYPE))
9869 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9870 break;
9871
9872 CASE_FLT_FN (BUILT_IN_ERFC):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9875 break;
9876
9877 CASE_FLT_FN (BUILT_IN_TGAMMA):
9878 if (validate_arg (arg0, REAL_TYPE))
9879 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9880 break;
9881
9882 CASE_FLT_FN (BUILT_IN_EXP):
9883 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9884
9885 CASE_FLT_FN (BUILT_IN_EXP2):
9886 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9887
9888 CASE_FLT_FN (BUILT_IN_EXP10):
9889 CASE_FLT_FN (BUILT_IN_POW10):
9890 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9891
9892 CASE_FLT_FN (BUILT_IN_EXPM1):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9895 break;
9896
9897 CASE_FLT_FN (BUILT_IN_LOG):
9898 if (validate_arg (arg0, REAL_TYPE))
9899 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9900 break;
9901
9902 CASE_FLT_FN (BUILT_IN_LOG2):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9905 break;
9906
9907 CASE_FLT_FN (BUILT_IN_LOG10):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9910 break;
9911
9912 CASE_FLT_FN (BUILT_IN_LOG1P):
9913 if (validate_arg (arg0, REAL_TYPE))
9914 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9915 &dconstm1, NULL, false);
9916 break;
9917
9918 CASE_FLT_FN (BUILT_IN_J0):
9919 if (validate_arg (arg0, REAL_TYPE))
9920 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9921 NULL, NULL, 0);
9922 break;
9923
9924 CASE_FLT_FN (BUILT_IN_J1):
9925 if (validate_arg (arg0, REAL_TYPE))
9926 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9927 NULL, NULL, 0);
9928 break;
9929
9930 CASE_FLT_FN (BUILT_IN_Y0):
9931 if (validate_arg (arg0, REAL_TYPE))
9932 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9933 &dconst0, NULL, false);
9934 break;
9935
9936 CASE_FLT_FN (BUILT_IN_Y1):
9937 if (validate_arg (arg0, REAL_TYPE))
9938 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9939 &dconst0, NULL, false);
9940 break;
9941
9942 CASE_FLT_FN (BUILT_IN_NAN):
9943 case BUILT_IN_NAND32:
9944 case BUILT_IN_NAND64:
9945 case BUILT_IN_NAND128:
9946 return fold_builtin_nan (arg0, type, true);
9947
9948 CASE_FLT_FN (BUILT_IN_NANS):
9949 return fold_builtin_nan (arg0, type, false);
9950
9951 CASE_FLT_FN (BUILT_IN_FLOOR):
9952 return fold_builtin_floor (loc, fndecl, arg0);
9953
9954 CASE_FLT_FN (BUILT_IN_CEIL):
9955 return fold_builtin_ceil (loc, fndecl, arg0);
9956
9957 CASE_FLT_FN (BUILT_IN_TRUNC):
9958 return fold_builtin_trunc (loc, fndecl, arg0);
9959
9960 CASE_FLT_FN (BUILT_IN_ROUND):
9961 return fold_builtin_round (loc, fndecl, arg0);
9962
9963 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9964 CASE_FLT_FN (BUILT_IN_RINT):
9965 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9966
9967 CASE_FLT_FN (BUILT_IN_ICEIL):
9968 CASE_FLT_FN (BUILT_IN_LCEIL):
9969 CASE_FLT_FN (BUILT_IN_LLCEIL):
9970 CASE_FLT_FN (BUILT_IN_LFLOOR):
9971 CASE_FLT_FN (BUILT_IN_IFLOOR):
9972 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9973 CASE_FLT_FN (BUILT_IN_IROUND):
9974 CASE_FLT_FN (BUILT_IN_LROUND):
9975 CASE_FLT_FN (BUILT_IN_LLROUND):
9976 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9977
9978 CASE_FLT_FN (BUILT_IN_IRINT):
9979 CASE_FLT_FN (BUILT_IN_LRINT):
9980 CASE_FLT_FN (BUILT_IN_LLRINT):
9981 return fold_fixed_mathfn (loc, fndecl, arg0);
9982
9983 case BUILT_IN_BSWAP16:
9984 case BUILT_IN_BSWAP32:
9985 case BUILT_IN_BSWAP64:
9986 return fold_builtin_bswap (fndecl, arg0);
9987
9988 CASE_INT_FN (BUILT_IN_FFS):
9989 CASE_INT_FN (BUILT_IN_CLZ):
9990 CASE_INT_FN (BUILT_IN_CTZ):
9991 CASE_INT_FN (BUILT_IN_CLRSB):
9992 CASE_INT_FN (BUILT_IN_POPCOUNT):
9993 CASE_INT_FN (BUILT_IN_PARITY):
9994 return fold_builtin_bitop (fndecl, arg0);
9995
9996 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9997 return fold_builtin_signbit (loc, arg0, type);
9998
9999 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10000 return fold_builtin_significand (loc, arg0, type);
10001
10002 CASE_FLT_FN (BUILT_IN_ILOGB):
10003 CASE_FLT_FN (BUILT_IN_LOGB):
10004 return fold_builtin_logb (loc, arg0, type);
10005
10006 case BUILT_IN_ISASCII:
10007 return fold_builtin_isascii (loc, arg0);
10008
10009 case BUILT_IN_TOASCII:
10010 return fold_builtin_toascii (loc, arg0);
10011
10012 case BUILT_IN_ISDIGIT:
10013 return fold_builtin_isdigit (loc, arg0);
10014
10015 CASE_FLT_FN (BUILT_IN_FINITE):
10016 case BUILT_IN_FINITED32:
10017 case BUILT_IN_FINITED64:
10018 case BUILT_IN_FINITED128:
10019 case BUILT_IN_ISFINITE:
10020 {
10021 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10022 if (ret)
10023 return ret;
10024 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10025 }
10026
10027 CASE_FLT_FN (BUILT_IN_ISINF):
10028 case BUILT_IN_ISINFD32:
10029 case BUILT_IN_ISINFD64:
10030 case BUILT_IN_ISINFD128:
10031 {
10032 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10033 if (ret)
10034 return ret;
10035 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10036 }
10037
10038 case BUILT_IN_ISNORMAL:
10039 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10040
10041 case BUILT_IN_ISINF_SIGN:
10042 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10043
10044 CASE_FLT_FN (BUILT_IN_ISNAN):
10045 case BUILT_IN_ISNAND32:
10046 case BUILT_IN_ISNAND64:
10047 case BUILT_IN_ISNAND128:
10048 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10049
10050 case BUILT_IN_FREE:
10051 if (integer_zerop (arg0))
10052 return build_empty_stmt (loc);
10053 break;
10054
10055 default:
10056 break;
10057 }
10058
10059 return NULL_TREE;
10061 }
10062
10063 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10064 This function returns NULL_TREE if no simplification was possible. */
10065
10066 static tree
10067 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10068 {
10069 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10070 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10071
10072 switch (fcode)
10073 {
10074 CASE_FLT_FN (BUILT_IN_JN):
10075 if (validate_arg (arg0, INTEGER_TYPE)
10076 && validate_arg (arg1, REAL_TYPE))
10077 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_YN):
10081 if (validate_arg (arg0, INTEGER_TYPE)
10082 && validate_arg (arg1, REAL_TYPE))
10083 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10084 &dconst0, false);
10085 break;
10086
10087 CASE_FLT_FN (BUILT_IN_DREM):
10088 CASE_FLT_FN (BUILT_IN_REMAINDER):
10089 if (validate_arg (arg0, REAL_TYPE)
10090 && validate_arg (arg1, REAL_TYPE))
10091 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10092 break;
10093
10094 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10095 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10096 if (validate_arg (arg0, REAL_TYPE)
10097 && validate_arg (arg1, POINTER_TYPE))
10098 return do_mpfr_lgamma_r (arg0, arg1, type);
10099 break;
10100
10101 CASE_FLT_FN (BUILT_IN_ATAN2):
10102 if (validate_arg (arg0, REAL_TYPE)
10103 && validate_arg (arg1, REAL_TYPE))
10104 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10105 break;
10106
10107 CASE_FLT_FN (BUILT_IN_FDIM):
10108 if (validate_arg (arg0, REAL_TYPE)
10109 && validate_arg (arg1, REAL_TYPE))
10110 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10111 break;
10112
10113 CASE_FLT_FN (BUILT_IN_HYPOT):
10114 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10115
10116 CASE_FLT_FN (BUILT_IN_CPOW):
10117 if (validate_arg (arg0, COMPLEX_TYPE)
10118 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10119 && validate_arg (arg1, COMPLEX_TYPE)
10120 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10121 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10122 break;
10123
10124 CASE_FLT_FN (BUILT_IN_LDEXP):
10125 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10126 CASE_FLT_FN (BUILT_IN_SCALBN):
10127 CASE_FLT_FN (BUILT_IN_SCALBLN):
10128 return fold_builtin_load_exponent (loc, arg0, arg1,
10129 type, /*ldexp=*/false);
10130
10131 CASE_FLT_FN (BUILT_IN_FREXP):
10132 return fold_builtin_frexp (loc, arg0, arg1, type);
10133
10134 CASE_FLT_FN (BUILT_IN_MODF):
10135 return fold_builtin_modf (loc, arg0, arg1, type);
10136
10137 case BUILT_IN_STRSTR:
10138 return fold_builtin_strstr (loc, arg0, arg1, type);
10139
10140 case BUILT_IN_STRSPN:
10141 return fold_builtin_strspn (loc, arg0, arg1);
10142
10143 case BUILT_IN_STRCSPN:
10144 return fold_builtin_strcspn (loc, arg0, arg1);
10145
10146 case BUILT_IN_STRCHR:
10147 case BUILT_IN_INDEX:
10148 return fold_builtin_strchr (loc, arg0, arg1, type);
10149
10150 case BUILT_IN_STRRCHR:
10151 case BUILT_IN_RINDEX:
10152 return fold_builtin_strrchr (loc, arg0, arg1, type);
10153
10154 case BUILT_IN_STRCMP:
10155 return fold_builtin_strcmp (loc, arg0, arg1);
10156
10157 case BUILT_IN_STRPBRK:
10158 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10159
10160 case BUILT_IN_EXPECT:
10161 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10162
10163 CASE_FLT_FN (BUILT_IN_POW):
10164 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10165
10166 CASE_FLT_FN (BUILT_IN_POWI):
10167 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10168
10169 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10170 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10171
10172 CASE_FLT_FN (BUILT_IN_FMIN):
10173 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10174
10175 CASE_FLT_FN (BUILT_IN_FMAX):
10176 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10177
10178 case BUILT_IN_ISGREATER:
10179 return fold_builtin_unordered_cmp (loc, fndecl,
10180 arg0, arg1, UNLE_EXPR, LE_EXPR);
10181 case BUILT_IN_ISGREATEREQUAL:
10182 return fold_builtin_unordered_cmp (loc, fndecl,
10183 arg0, arg1, UNLT_EXPR, LT_EXPR);
10184 case BUILT_IN_ISLESS:
10185 return fold_builtin_unordered_cmp (loc, fndecl,
10186 arg0, arg1, UNGE_EXPR, GE_EXPR);
10187 case BUILT_IN_ISLESSEQUAL:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNGT_EXPR, GT_EXPR);
10190 case BUILT_IN_ISLESSGREATER:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10193 case BUILT_IN_ISUNORDERED:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNORDERED_EXPR,
10196 NOP_EXPR);
10197
10198 /* We do the folding for va_start in the expander. */
10199 case BUILT_IN_VA_START:
10200 break;
10201
10202 case BUILT_IN_OBJECT_SIZE:
10203 return fold_builtin_object_size (arg0, arg1);
10204
10205 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10206 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10207
10208 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10209 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10210
10211 default:
10212 break;
10213 }
10214 return NULL_TREE;
10215 }
10216
10217 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10218 and ARG2.
10219 This function returns NULL_TREE if no simplification was possible. */
10220
10221 static tree
10222 fold_builtin_3 (location_t loc, tree fndecl,
10223 tree arg0, tree arg1, tree arg2)
10224 {
10225 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10227 switch (fcode)
10228 {
10230 CASE_FLT_FN (BUILT_IN_SINCOS):
10231 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10232
10233 CASE_FLT_FN (BUILT_IN_FMA):
10234 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10236
10237 CASE_FLT_FN (BUILT_IN_REMQUO):
10238 if (validate_arg (arg0, REAL_TYPE)
10239 && validate_arg (arg1, REAL_TYPE)
10240 && validate_arg (arg2, POINTER_TYPE))
10241 return do_mpfr_remquo (arg0, arg1, arg2);
10242 break;
10243
10244 case BUILT_IN_STRNCMP:
10245 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10246
10247 case BUILT_IN_MEMCHR:
10248 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10249
10250 case BUILT_IN_BCMP:
10251 case BUILT_IN_MEMCMP:
10252 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10253
10254 case BUILT_IN_EXPECT:
10255 return fold_builtin_expect (loc, arg0, arg1, arg2);
10256
10257 case BUILT_IN_ADD_OVERFLOW:
10258 case BUILT_IN_SUB_OVERFLOW:
10259 case BUILT_IN_MUL_OVERFLOW:
10260 case BUILT_IN_SADD_OVERFLOW:
10261 case BUILT_IN_SADDL_OVERFLOW:
10262 case BUILT_IN_SADDLL_OVERFLOW:
10263 case BUILT_IN_SSUB_OVERFLOW:
10264 case BUILT_IN_SSUBL_OVERFLOW:
10265 case BUILT_IN_SSUBLL_OVERFLOW:
10266 case BUILT_IN_SMUL_OVERFLOW:
10267 case BUILT_IN_SMULL_OVERFLOW:
10268 case BUILT_IN_SMULLL_OVERFLOW:
10269 case BUILT_IN_UADD_OVERFLOW:
10270 case BUILT_IN_UADDL_OVERFLOW:
10271 case BUILT_IN_UADDLL_OVERFLOW:
10272 case BUILT_IN_USUB_OVERFLOW:
10273 case BUILT_IN_USUBL_OVERFLOW:
10274 case BUILT_IN_USUBLL_OVERFLOW:
10275 case BUILT_IN_UMUL_OVERFLOW:
10276 case BUILT_IN_UMULL_OVERFLOW:
10277 case BUILT_IN_UMULLL_OVERFLOW:
10278 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10279
10280 default:
10281 break;
10282 }
10283 return NULL_TREE;
10284 }
10285
10286 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10287 arguments. The trailing bool parameter, formerly IGNORE (true if the
10288 result of the call is ignored), is now unused. This function returns
10289 NULL_TREE if no simplification was possible. */
10290
10291 tree
10292 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10293 {
10294 tree ret = NULL_TREE;
10295
10296 switch (nargs)
10297 {
10298 case 0:
10299 ret = fold_builtin_0 (loc, fndecl);
10300 break;
10301 case 1:
10302 ret = fold_builtin_1 (loc, fndecl, args[0]);
10303 break;
10304 case 2:
10305 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10306 break;
10307 case 3:
10308 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10309 break;
10310 default:
10311 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10312 break;
10313 }
10314 if (ret)
10315 {
10316 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10317 SET_EXPR_LOCATION (ret, loc);
10318 TREE_NO_WARNING (ret) = 1;
10319 return ret;
10320 }
10321 return NULL_TREE;
10322 }
10323
10324 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10325 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10326 of arguments in ARGS to be omitted. OLDNARGS is the number of
10327 elements in ARGS. */
10328
10329 static tree
10330 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10331 int skip, tree fndecl, int n, va_list newargs)
10332 {
10333 int nargs = oldnargs - skip + n;
10334 tree *buffer;
10335
10336 if (n > 0)
10337 {
10338 int i, j;
10339
10340 buffer = XALLOCAVEC (tree, nargs);
10341 for (i = 0; i < n; i++)
10342 buffer[i] = va_arg (newargs, tree);
10343 for (j = skip; j < oldnargs; j++, i++)
10344 buffer[i] = args[j];
10345 }
10346 else
10347 buffer = args + skip;
10348
10349 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10350 }
10351
10352 /* Return true if FNDECL shouldn't be folded right now.
10353 If a built-in function has a wrapper declared inline with attribute
10354 always_inline, defer folding until after always_inline functions
10355 have been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10356 might not be performed. */
10357
10358 bool
10359 avoid_folding_inline_builtin (tree fndecl)
10360 {
10361 return (DECL_DECLARED_INLINE_P (fndecl)
10362 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10363 && cfun
10364 && !cfun->always_inline_functions_inlined
10365 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10366 }
10367
10368 /* A wrapper function for builtin folding that prevents warnings for
10369 "statement without effect" and the like, caused by removing the
10370 call node earlier than the warning is generated. */
10371
10372 tree
10373 fold_call_expr (location_t loc, tree exp, bool ignore)
10374 {
10375 tree ret = NULL_TREE;
10376 tree fndecl = get_callee_fndecl (exp);
10377 if (fndecl
10378 && TREE_CODE (fndecl) == FUNCTION_DECL
10379 && DECL_BUILT_IN (fndecl)
10380 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10381 yet. Defer folding until we see all the arguments
10382 (after inlining). */
10383 && !CALL_EXPR_VA_ARG_PACK (exp))
10384 {
10385 int nargs = call_expr_nargs (exp);
10386
10387 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10388 instead the last argument is __builtin_va_arg_pack (). Defer folding
10389 even in that case, until arguments are finalized. */
10390 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10391 {
10392 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10393 if (fndecl2
10394 && TREE_CODE (fndecl2) == FUNCTION_DECL
10395 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10396 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10397 return NULL_TREE;
10398 }
10399
10400 if (avoid_folding_inline_builtin (fndecl))
10401 return NULL_TREE;
10402
10403 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10404 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10405 CALL_EXPR_ARGP (exp), ignore);
10406 else
10407 {
10408 tree *args = CALL_EXPR_ARGP (exp);
10409 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10410 if (ret)
10411 return ret;
10412 }
10413 }
10414 return NULL_TREE;
10415 }
10416
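/* EDITOR'S NOTE (hypothetical user code): __builtin_va_arg_pack ()
   forwards a wrapper's variable arguments to another call, e.g.

     extern int my_printf (const char *, ...);
     static inline __attribute__ ((always_inline)) int
     log_printf (const char *fmt, ...)
     {
       return my_printf (fmt, __builtin_va_arg_pack ());
     }

   Until log_printf is inlined, the argument list of the my_printf
   call is not final, which is why the folder above bails out when it
   sees such a call in the last argument position.  */
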
10417 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10418 N arguments are passed in the array ARGARRAY. Return a folded
10419 expression or NULL_TREE if no simplification was possible. */
10420
10421 tree
10422 fold_builtin_call_array (location_t loc, tree,
10423 tree fn,
10424 int n,
10425 tree *argarray)
10426 {
10427 if (TREE_CODE (fn) != ADDR_EXPR)
10428 return NULL_TREE;
10429
10430 tree fndecl = TREE_OPERAND (fn, 0);
10431 if (TREE_CODE (fndecl) == FUNCTION_DECL
10432 && DECL_BUILT_IN (fndecl))
10433 {
10434 /* If last argument is __builtin_va_arg_pack (), arguments to this
10435 function are not finalized yet. Defer folding until they are. */
10436 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10437 {
10438 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10439 if (fndecl2
10440 && TREE_CODE (fndecl2) == FUNCTION_DECL
10441 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10442 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10443 return NULL_TREE;
10444 }
10445 if (avoid_folding_inline_builtin (fndecl))
10446 return NULL_TREE;
10447 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10448 return targetm.fold_builtin (fndecl, n, argarray, false);
10449 else
10450 return fold_builtin_n (loc, fndecl, argarray, n, false);
10451 }
10452
10453 return NULL_TREE;
10454 }
10455
10456 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10457 along with N new arguments specified as the "..." parameters. SKIP
10458 is the number of arguments in EXP to be omitted. This function is used
10459 to do varargs-to-varargs transformations. */
10460
10461 static tree
10462 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10463 {
10464 va_list ap;
10465 tree t;
10466
10467 va_start (ap, n);
10468 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10469 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10470 va_end (ap);
10471
10472 return t;
10473 }
10474
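/* EDITOR'S NOTE (hypothetical usage sketch, names invented): to
   transform f (a, b, c, d) into g (x, y, c, d), a caller would write

     rewrite_call_expr (loc, exp, 2, g_decl, 2, x, y);

   SKIP = 2 drops a and b from the copied tail of EXP's arguments, and
   the N = 2 new arguments x and y are placed in front of it.  */
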
10475 /* Validate a single argument ARG against a tree code CODE representing
10476 a type. */
10477
10478 static bool
10479 validate_arg (const_tree arg, enum tree_code code)
10480 {
10481 if (!arg)
10482 return false;
10483 else if (code == POINTER_TYPE)
10484 return POINTER_TYPE_P (TREE_TYPE (arg));
10485 else if (code == INTEGER_TYPE)
10486 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10487 return code == TREE_CODE (TREE_TYPE (arg));
10488 }
10489
10490 /* This function validates the types of a function call argument list
10491 against a specified list of tree_codes. If the last specifier is a 0,
10492 that represents an ellipsis; otherwise the last specifier must be a
10493 VOID_TYPE.
10494
10495 This is the GIMPLE version of validate_arglist. Eventually we want to
10496 completely convert builtins.c to work from GIMPLEs and the tree based
10497 validate_arglist will then be removed. */
10498
10499 bool
10500 validate_gimple_arglist (const gcall *call, ...)
10501 {
10502 enum tree_code code;
10503 bool res = false;
10504 va_list ap;
10505 const_tree arg;
10506 size_t i;
10507
10508 va_start (ap, call);
10509 i = 0;
10510
10511 do
10512 {
10513 code = (enum tree_code) va_arg (ap, int);
10514 switch (code)
10515 {
10516 case 0:
10517 /* This signifies an ellipsis; any further arguments are all ok. */
10518 res = true;
10519 goto end;
10520 case VOID_TYPE:
10521 /* This signifies an endlink; if no arguments remain, return
10522 true, otherwise return false. */
10523 res = (i == gimple_call_num_args (call));
10524 goto end;
10525 default:
10526 /* If no parameters remain or the parameter's code does not
10527 match the specified code, return false. Otherwise continue
10528 checking any remaining arguments. */
10529 arg = gimple_call_arg (call, i++);
10530 if (!validate_arg (arg, code))
10531 goto end;
10532 break;
10533 }
10534 }
10535 while (1);
10536
10537 /* We need gotos here since every exit path must funnel through
10538 the single va_end call below. */
10539 end: ;
10540 va_end (ap);
10541
10542 return res;
10543 }
10544
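/* EDITOR'S NOTE (usage sketch): the variadic specification mirrors the
   callee's prototype.  A memchr-like call could be checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                              INTEGER_TYPE, VOID_TYPE);

   while a printf-like callee, whose trailing arguments should not be
   checked, ends the list with 0 instead of VOID_TYPE:

     validate_gimple_arglist (call, POINTER_TYPE, 0);  */
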
10545 /* Default target-specific builtin expander that does nothing. */
10546
10547 rtx
10548 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10549 rtx target ATTRIBUTE_UNUSED,
10550 rtx subtarget ATTRIBUTE_UNUSED,
10551 machine_mode mode ATTRIBUTE_UNUSED,
10552 int ignore ATTRIBUTE_UNUSED)
10553 {
10554 return NULL_RTX;
10555 }
10556
10557 /* Returns true if EXP represents data that would potentially reside
10558 in a readonly section. */
10559
10560 bool
10561 readonly_data_expr (tree exp)
10562 {
10563 STRIP_NOPS (exp);
10564
10565 if (TREE_CODE (exp) != ADDR_EXPR)
10566 return false;
10567
10568 exp = get_base_address (TREE_OPERAND (exp, 0));
10569 if (!exp)
10570 return false;
10571
10572 /* Make sure we call decl_readonly_section only for trees it
10573 can handle (since it returns true for everything it doesn't
10574 understand). */
10575 if (TREE_CODE (exp) == STRING_CST
10576 || TREE_CODE (exp) == CONSTRUCTOR
10577 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10578 return decl_readonly_section (exp, 0);
10579 else
10580 return false;
10581 }
10582
10583 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10584 to the call, and TYPE is its return type.
10585
10586 Return NULL_TREE if no simplification was possible, otherwise return the
10587 simplified form of the call as a tree.
10588
10589 The simplified form may be a constant or other expression which
10590 computes the same value, but in a more efficient manner (including
10591 calls to other builtin functions).
10592
10593 The call may contain arguments which need to be evaluated, but
10594 which are not useful to determine the result of the call. In
10595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10596 COMPOUND_EXPR will be an argument which must be evaluated.
10597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10598 COMPOUND_EXPR in the chain will contain the tree for the simplified
10599 form of the builtin function call. */
10600
10601 static tree
10602 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10603 {
10604 if (!validate_arg (s1, POINTER_TYPE)
10605 || !validate_arg (s2, POINTER_TYPE))
10606 return NULL_TREE;
10607 else
10608 {
10609 tree fn;
10610 const char *p1, *p2;
10611
10612 p2 = c_getstr (s2);
10613 if (p2 == NULL)
10614 return NULL_TREE;
10615
10616 p1 = c_getstr (s1);
10617 if (p1 != NULL)
10618 {
10619 const char *r = strstr (p1, p2);
10620 tree tem;
10621
10622 if (r == NULL)
10623 return build_int_cst (TREE_TYPE (s1), 0);
10624
10625 /* Return an offset into the constant string argument. */
10626 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10627 return fold_convert_loc (loc, type, tem);
10628 }
10629
10630 /* The argument is const char *, and the result is char *, so we need
10631 a type conversion here to avoid a warning. */
10632 if (p2[0] == '\0')
10633 return fold_convert_loc (loc, type, s1);
10634
10635 if (p2[1] != '\0')
10636 return NULL_TREE;
10637
10638 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10639 if (!fn)
10640 return NULL_TREE;
10641
10642 /* New argument list transforming strstr(s1, s2) to
10643 strchr(s1, s2[0]). */
10644 return build_call_expr_loc (loc, fn, 2, s1,
10645 build_int_cst (integer_type_node, p2[0]));
10646 }
10647 }
10648
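/* EDITOR'S NOTE (source-level view of the folds above):

     strstr (s, "")        ->  (char *) s
     strstr (s, "c")       ->  strchr (s, 'c')
     strstr ("abcde", "c") ->  "abcde" + 2  (a constant offset)

   The last form is why the result is built as an offset into the
   constant first argument.  */
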
10649 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10650 the call, and TYPE is its return type.
10651
10652 Return NULL_TREE if no simplification was possible, otherwise return the
10653 simplified form of the call as a tree.
10654
10655 The simplified form may be a constant or other expression which
10656 computes the same value, but in a more efficient manner (including
10657 calls to other builtin functions).
10658
10659 The call may contain arguments which need to be evaluated, but
10660 which are not useful to determine the result of the call. In
10661 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10662 COMPOUND_EXPR will be an argument which must be evaluated.
10663 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10664 COMPOUND_EXPR in the chain will contain the tree for the simplified
10665 form of the builtin function call. */
10666
10667 static tree
10668 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10669 {
10670 if (!validate_arg (s1, POINTER_TYPE)
10671 || !validate_arg (s2, INTEGER_TYPE))
10672 return NULL_TREE;
10673 else
10674 {
10675 const char *p1;
10676
10677 if (TREE_CODE (s2) != INTEGER_CST)
10678 return NULL_TREE;
10679
10680 p1 = c_getstr (s1);
10681 if (p1 != NULL)
10682 {
10683 char c;
10684 const char *r;
10685 tree tem;
10686
10687 if (target_char_cast (s2, &c))
10688 return NULL_TREE;
10689
10690 r = strchr (p1, c);
10691
10692 if (r == NULL)
10693 return build_int_cst (TREE_TYPE (s1), 0);
10694
10695 /* Return an offset into the constant string argument. */
10696 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10697 return fold_convert_loc (loc, type, tem);
10698 }
10699 return NULL_TREE;
10700 }
10701 }
10702
10703 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10704 the call, and TYPE is its return type.
10705
10706 Return NULL_TREE if no simplification was possible, otherwise return the
10707 simplified form of the call as a tree.
10708
10709 The simplified form may be a constant or other expression which
10710 computes the same value, but in a more efficient manner (including
10711 calls to other builtin functions).
10712
10713 The call may contain arguments which need to be evaluated, but
10714 which are not useful to determine the result of the call. In
10715 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10716 COMPOUND_EXPR will be an argument which must be evaluated.
10717 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10718 COMPOUND_EXPR in the chain will contain the tree for the simplified
10719 form of the builtin function call. */
10720
10721 static tree
10722 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10723 {
10724 if (!validate_arg (s1, POINTER_TYPE)
10725 || !validate_arg (s2, INTEGER_TYPE))
10726 return NULL_TREE;
10727 else
10728 {
10729 tree fn;
10730 const char *p1;
10731
10732 if (TREE_CODE (s2) != INTEGER_CST)
10733 return NULL_TREE;
10734
10735 p1 = c_getstr (s1);
10736 if (p1 != NULL)
10737 {
10738 char c;
10739 const char *r;
10740 tree tem;
10741
10742 if (target_char_cast (s2, &c))
10743 return NULL_TREE;
10744
10745 r = strrchr (p1, c);
10746
10747 if (r == NULL)
10748 return build_int_cst (TREE_TYPE (s1), 0);
10749
10750 /* Return an offset into the constant string argument. */
10751 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10752 return fold_convert_loc (loc, type, tem);
10753 }
10754
10755 if (! integer_zerop (s2))
10756 return NULL_TREE;
10757
10758 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10759 if (!fn)
10760 return NULL_TREE;
10761
10762 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10763 return build_call_expr_loc (loc, fn, 2, s1, s2);
10764 }
10765 }
10766
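/* EDITOR'S NOTE: the fold above relies on '\0' occurring exactly once
   in a string, so searching from either end finds the same character:

     strrchr (s, '\0')  ->  strchr (s, '\0')

   i.e. a pointer to the terminating NUL, computable in one forward
   pass.  */
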
10767 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10768 to the call, and TYPE is its return type.
10769
10770 Return NULL_TREE if no simplification was possible, otherwise return the
10771 simplified form of the call as a tree.
10772
10773 The simplified form may be a constant or other expression which
10774 computes the same value, but in a more efficient manner (including
10775 calls to other builtin functions).
10776
10777 The call may contain arguments which need to be evaluated, but
10778 which are not useful to determine the result of the call. In
10779 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10780 COMPOUND_EXPR will be an argument which must be evaluated.
10781 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10782 COMPOUND_EXPR in the chain will contain the tree for the simplified
10783 form of the builtin function call. */
10784
10785 static tree
10786 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10787 {
10788 if (!validate_arg (s1, POINTER_TYPE)
10789 || !validate_arg (s2, POINTER_TYPE))
10790 return NULL_TREE;
10791 else
10792 {
10793 tree fn;
10794 const char *p1, *p2;
10795
10796 p2 = c_getstr (s2);
10797 if (p2 == NULL)
10798 return NULL_TREE;
10799
10800 p1 = c_getstr (s1);
10801 if (p1 != NULL)
10802 {
10803 const char *r = strpbrk (p1, p2);
10804 tree tem;
10805
10806 if (r == NULL)
10807 return build_int_cst (TREE_TYPE (s1), 0);
10808
10809 /* Return an offset into the constant string argument. */
10810 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10811 return fold_convert_loc (loc, type, tem);
10812 }
10813
10814 if (p2[0] == '\0')
10815 /* strpbrk(x, "") == NULL.
10816 Evaluate and ignore s1 in case it had side-effects. */
10817 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10818
10819 if (p2[1] != '\0')
10820 return NULL_TREE; /* Really call strpbrk. */
10821
10822 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10823 if (!fn)
10824 return NULL_TREE;
10825
10826 /* New argument list transforming strpbrk(s1, s2) to
10827 strchr(s1, s2[0]). */
10828 return build_call_expr_loc (loc, fn, 2, s1,
10829 build_int_cst (integer_type_node, p2[0]));
10830 }
10831 }
10832
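/* EDITOR'S NOTE (source-level view of the folds above):

     strpbrk (s, "")   ->  NULL, with s still evaluated for its
                           side-effects via omit_one_operand_loc
     strpbrk (s, "c")  ->  strchr (s, 'c')

   With both arguments constant the call folds to a constant offset,
   as in the strstr case.  */
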
10833 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10834 to the call.
10835
10836 Return NULL_TREE if no simplification was possible, otherwise return the
10837 simplified form of the call as a tree.
10838
10839 The simplified form may be a constant or other expression which
10840 computes the same value, but in a more efficient manner (including
10841 calls to other builtin functions).
10842
10843 The call may contain arguments which need to be evaluated, but
10844 which are not useful to determine the result of the call. In
10845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10846 COMPOUND_EXPR will be an argument which must be evaluated.
10847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10848 COMPOUND_EXPR in the chain will contain the tree for the simplified
10849 form of the builtin function call. */
10850
10851 static tree
10852 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10853 {
10854 if (!validate_arg (s1, POINTER_TYPE)
10855 || !validate_arg (s2, POINTER_TYPE))
10856 return NULL_TREE;
10857 else
10858 {
10859 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10860
10861 /* If both arguments are constants, evaluate at compile-time. */
10862 if (p1 && p2)
10863 {
10864 const size_t r = strspn (p1, p2);
10865 return build_int_cst (size_type_node, r);
10866 }
10867
10868 /* If either argument is "", return NULL_TREE. */
10869 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10870 /* Evaluate and ignore both arguments in case either one has
10871 side-effects. */
10872 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10873 s1, s2);
10874 return NULL_TREE;
10875 }
10876 }
10877
10878 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10879 to the call.
10880
10881 Return NULL_TREE if no simplification was possible, otherwise return the
10882 simplified form of the call as a tree.
10883
10884 The simplified form may be a constant or other expression which
10885 computes the same value, but in a more efficient manner (including
10886 calls to other builtin functions).
10887
10888 The call may contain arguments which need to be evaluated, but
10889 which are not useful to determine the result of the call. In
10890 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10891 COMPOUND_EXPR will be an argument which must be evaluated.
10892 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10893 COMPOUND_EXPR in the chain will contain the tree for the simplified
10894 form of the builtin function call. */
10895
10896 static tree
10897 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10898 {
10899 if (!validate_arg (s1, POINTER_TYPE)
10900 || !validate_arg (s2, POINTER_TYPE))
10901 return NULL_TREE;
10902 else
10903 {
10904 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10905
10906 /* If both arguments are constants, evaluate at compile-time. */
10907 if (p1 && p2)
10908 {
10909 const size_t r = strcspn (p1, p2);
10910 return build_int_cst (size_type_node, r);
10911 }
10912
10913 /* If the first argument is "", return NULL_TREE. */
10914 if (p1 && *p1 == '\0')
10915 {
10916 /* Evaluate and ignore argument s2 in case it has
10917 side-effects. */
10918 return omit_one_operand_loc (loc, size_type_node,
10919 size_zero_node, s2);
10920 }
10921
10922 /* If the second argument is "", return __builtin_strlen(s1). */
10923 if (p2 && *p2 == '\0')
10924 {
10925 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10926
10927 /* If the replacement _DECL isn't initialized, don't do the
10928 transformation. */
10929 if (!fn)
10930 return NULL_TREE;
10931
10932 return build_call_expr_loc (loc, fn, 1, s1);
10933 }
10934 return NULL_TREE;
10935 }
10936 }
10937
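/* EDITOR'S NOTE (values follow the C library definitions used above):

     strspn ("abcabc", "abc")  ->  6   length of leading accepted span
     strspn (s, "")            ->  0
     strcspn ("abcdef", "de")  ->  3   length of leading rejected span
     strcspn (s, "")           ->  strlen (s)  */
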
10938 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10939 produced, false otherwise. This is done so that we don't output the
10940 error or warning more than once. */
10941
10942 bool
10943 fold_builtin_next_arg (tree exp, bool va_start_p)
10944 {
10945 tree fntype = TREE_TYPE (current_function_decl);
10946 int nargs = call_expr_nargs (exp);
10947 tree arg;
10948 /* There is a good chance the current input_location points inside the
10949 definition of the va_start macro (perhaps on the token for
10950 builtin) in a system header, so warnings will not be emitted.
10951 Use the location in real source code. */
10952 source_location current_location =
10953 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10954 NULL);
10955
10956 if (!stdarg_p (fntype))
10957 {
10958 error ("%<va_start%> used in function with fixed args");
10959 return true;
10960 }
10961
10962 if (va_start_p)
10963 {
10964 if (nargs != 2)
10965 {
10966 error ("wrong number of arguments to function %<va_start%>");
10967 return true;
10968 }
10969 arg = CALL_EXPR_ARG (exp, 1);
10970 }
10971 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10972 once we have checked the arguments and, if needed, issued a warning. */
10973 else
10974 {
10975 if (nargs == 0)
10976 {
10977 /* Evidently an out of date version of <stdarg.h>; can't validate
10978 va_start's second argument, but can still work as intended. */
10979 warning_at (current_location,
10980 OPT_Wvarargs,
10981 "%<__builtin_next_arg%> called without an argument");
10982 return true;
10983 }
10984 else if (nargs > 1)
10985 {
10986 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10987 return true;
10988 }
10989 arg = CALL_EXPR_ARG (exp, 0);
10990 }
10991
10992 if (TREE_CODE (arg) == SSA_NAME)
10993 arg = SSA_NAME_VAR (arg);
10994
10995 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10996 or __builtin_next_arg (0) the first time we see it, after checking
10997 the arguments and if needed issuing a warning. */
10998 if (!integer_zerop (arg))
10999 {
11000 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11001
11002 /* Strip off all nops for the sake of the comparison. This
11003 is not quite the same as STRIP_NOPS. It does more.
11004 We must also strip off INDIRECT_EXPR for C++ reference
11005 parameters. */
11006 while (CONVERT_EXPR_P (arg)
11007 || TREE_CODE (arg) == INDIRECT_REF)
11008 arg = TREE_OPERAND (arg, 0);
11009 if (arg != last_parm)
11010 {
11011 /* FIXME: Sometimes the tree optimizers hand us something other
11012 than the last argument even though the user did pass the last
11013 one. We just warn and proceed as if the last argument had
11014 been given, so wrong code may be generated because of it. */
11016 warning_at (current_location,
11017 OPT_Wvarargs,
11018 "second parameter of %<va_start%> not last named argument");
11019 }
11020
11021 /* Undefined by C99 7.15.1.4p4 (va_start):
11022 "If the parameter parmN is declared with the register storage
11023 class, with a function or array type, or with a type that is
11024 not compatible with the type that results after application of
11025 the default argument promotions, the behavior is undefined."
11026 */
11027 else if (DECL_REGISTER (arg))
11028 {
11029 warning_at (current_location,
11030 OPT_Wvarargs,
11031 "undefined behaviour when second parameter of "
11032 "%<va_start%> is declared with %<register%> storage");
11033 }
11034
11035 /* We want to verify the second parameter just once before the tree
11036 optimizers are run and then avoid keeping it in the tree,
11037 as otherwise we could warn even for correct code like:
11038 void foo (int i, ...)
11039 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11040 if (va_start_p)
11041 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11042 else
11043 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11044 }
11045 return false;
11046 }
11047
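/* EDITOR'S NOTE (hypothetical user code) for the -Wvarargs warning
   issued above:

     void foo (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   Here A is not the last named parameter, so the "second parameter of
   va_start not last named argument" warning fires.  */
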
11048
11049 /* Expand a call EXP to __builtin_object_size. */
11050
11051 static rtx
11052 expand_builtin_object_size (tree exp)
11053 {
11054 tree ost;
11055 int object_size_type;
11056 tree fndecl = get_callee_fndecl (exp);
11057
11058 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11059 {
11060 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11061 exp, fndecl);
11062 expand_builtin_trap ();
11063 return const0_rtx;
11064 }
11065
11066 ost = CALL_EXPR_ARG (exp, 1);
11067 STRIP_NOPS (ost);
11068
11069 if (TREE_CODE (ost) != INTEGER_CST
11070 || tree_int_cst_sgn (ost) < 0
11071 || compare_tree_int (ost, 3) > 0)
11072 {
11073 error ("%Klast argument of %D is not integer constant between 0 and 3",
11074 exp, fndecl);
11075 expand_builtin_trap ();
11076 return const0_rtx;
11077 }
11078
11079 object_size_type = tree_to_shwi (ost);
11080
11081 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11082 }
11083
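/* EDITOR'S NOTE: when the size is still unknown at expansion time,
   the code above returns the documented "unknown" value for each
   object-size type:

     __builtin_object_size (p, 0)  ->  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (types 2 and 3)  */
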
11084 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11085 FCODE is the BUILT_IN_* to use.
11086 Return NULL_RTX if we failed; the caller should emit a normal call,
11087 otherwise try to get the result in TARGET, if convenient (and in
11088 mode MODE if that's convenient). */
11089
11090 static rtx
11091 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11092 enum built_in_function fcode)
11093 {
11094 tree dest, src, len, size;
11095
11096 if (!validate_arglist (exp,
11097 POINTER_TYPE,
11098 fcode == BUILT_IN_MEMSET_CHK
11099 ? INTEGER_TYPE : POINTER_TYPE,
11100 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11101 return NULL_RTX;
11102
11103 dest = CALL_EXPR_ARG (exp, 0);
11104 src = CALL_EXPR_ARG (exp, 1);
11105 len = CALL_EXPR_ARG (exp, 2);
11106 size = CALL_EXPR_ARG (exp, 3);
11107
11108 if (! tree_fits_uhwi_p (size))
11109 return NULL_RTX;
11110
11111 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11112 {
11113 tree fn;
11114
11115 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11116 {
11117 warning_at (tree_nonartificial_location (exp),
11118 0, "%Kcall to %D will always overflow destination buffer",
11119 exp, get_callee_fndecl (exp));
11120 return NULL_RTX;
11121 }
11122
11123 fn = NULL_TREE;
11124 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11125 mem{cpy,pcpy,move,set} is available. */
11126 switch (fcode)
11127 {
11128 case BUILT_IN_MEMCPY_CHK:
11129 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11130 break;
11131 case BUILT_IN_MEMPCPY_CHK:
11132 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11133 break;
11134 case BUILT_IN_MEMMOVE_CHK:
11135 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11136 break;
11137 case BUILT_IN_MEMSET_CHK:
11138 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11139 break;
11140 default:
11141 break;
11142 }
11143
11144 if (! fn)
11145 return NULL_RTX;
11146
11147 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11148 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11149 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11150 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11151 }
11152 else if (fcode == BUILT_IN_MEMSET_CHK)
11153 return NULL_RTX;
11154 else
11155 {
11156 unsigned int dest_align = get_pointer_alignment (dest);
11157
11158 /* If DEST is not a pointer type, call the normal function. */
11159 if (dest_align == 0)
11160 return NULL_RTX;
11161
11162 /* If SRC and DEST are the same (and not volatile), do nothing. */
11163 if (operand_equal_p (src, dest, 0))
11164 {
11165 tree expr;
11166
11167 if (fcode != BUILT_IN_MEMPCPY_CHK)
11168 {
11169 /* Evaluate and ignore LEN in case it has side-effects. */
11170 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11171 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11172 }
11173
11174 expr = fold_build_pointer_plus (dest, len);
11175 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11176 }
11177
11178 /* __memmove_chk special case. */
11179 if (fcode == BUILT_IN_MEMMOVE_CHK)
11180 {
11181 unsigned int src_align = get_pointer_alignment (src);
11182
11183 if (src_align == 0)
11184 return NULL_RTX;
11185
11186 /* If src is categorized for a readonly section we can use
11187 normal __memcpy_chk. */
11188 if (readonly_data_expr (src))
11189 {
11190 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11191 if (!fn)
11192 return NULL_RTX;
11193 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11194 dest, src, len, size);
11195 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11196 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11197 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11198 }
11199 }
11200 return NULL_RTX;
11201 }
11202 }
11203
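/* EDITOR'S NOTE (source-level view): with a constant length that fits
   the known object size, the checked call is lowered to the plain one,

     __builtin___memcpy_chk (d, s, 8, 16)   ->   memcpy (d, s, 8)

   whereas __builtin___memcpy_chk (d, s, 32, 16) instead draws the
   "will always overflow destination buffer" warning above and is
   emitted as a normal library call.  */
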
11204 /* Emit warning if a buffer overflow is detected at compile time. */
11205
11206 static void
11207 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11208 {
11209 int is_strlen = 0;
11210 tree len, size;
11211 location_t loc = tree_nonartificial_location (exp);
11212
11213 switch (fcode)
11214 {
11215 case BUILT_IN_STRCPY_CHK:
11216 case BUILT_IN_STPCPY_CHK:
11217 /* For __strcat_chk the warning will be emitted only if overflowing
11218 by at least strlen (dest) + 1 bytes. */
11219 case BUILT_IN_STRCAT_CHK:
11220 len = CALL_EXPR_ARG (exp, 1);
11221 size = CALL_EXPR_ARG (exp, 2);
11222 is_strlen = 1;
11223 break;
11224 case BUILT_IN_STRNCAT_CHK:
11225 case BUILT_IN_STRNCPY_CHK:
11226 case BUILT_IN_STPNCPY_CHK:
11227 len = CALL_EXPR_ARG (exp, 2);
11228 size = CALL_EXPR_ARG (exp, 3);
11229 break;
11230 case BUILT_IN_SNPRINTF_CHK:
11231 case BUILT_IN_VSNPRINTF_CHK:
11232 len = CALL_EXPR_ARG (exp, 1);
11233 size = CALL_EXPR_ARG (exp, 3);
11234 break;
11235 default:
11236 gcc_unreachable ();
11237 }
11238
11239 if (!len || !size)
11240 return;
11241
11242 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11243 return;
11244
11245 if (is_strlen)
11246 {
11247 len = c_strlen (len, 1);
11248 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11249 return;
11250 }
11251 else if (fcode == BUILT_IN_STRNCAT_CHK)
11252 {
11253 tree src = CALL_EXPR_ARG (exp, 1);
11254 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11255 return;
11256 src = c_strlen (src, 1);
11257 if (! src || ! tree_fits_uhwi_p (src))
11258 {
11259 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11260 exp, get_callee_fndecl (exp));
11261 return;
11262 }
11263 else if (tree_int_cst_lt (src, size))
11264 return;
11265 }
11266 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11267 return;
11268
11269 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11270 exp, get_callee_fndecl (exp));
11271 }
11272
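/* EDITOR'S NOTE (hypothetical user code, as produced by the
   -D_FORTIFY_SOURCE headers): given

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   the constant source length 5 is not smaller than the known size 4,
   so the "will always overflow destination buffer" warning fires.  */
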
11273 /* Emit warning if a buffer overflow is detected at compile time
11274 in __sprintf_chk/__vsprintf_chk calls. */
11275
11276 static void
11277 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11278 {
11279 tree size, len, fmt;
11280 const char *fmt_str;
11281 int nargs = call_expr_nargs (exp);
11282
11283 /* Verify the required arguments in the original call. */
11284
11285 if (nargs < 4)
11286 return;
11287 size = CALL_EXPR_ARG (exp, 2);
11288 fmt = CALL_EXPR_ARG (exp, 3);
11289
11290 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11291 return;
11292
11293 /* Check whether the format is a literal string constant. */
11294 fmt_str = c_getstr (fmt);
11295 if (fmt_str == NULL)
11296 return;
11297
11298 if (!init_target_chars ())
11299 return;
11300
11301 /* If the format doesn't contain % args or %%, we know its size. */
11302 if (strchr (fmt_str, target_percent) == 0)
11303 len = build_int_cstu (size_type_node, strlen (fmt_str));
11304 /* If the format is "%s" and the first ... argument is a string literal,
11305 we know it too. */
11306 else if (fcode == BUILT_IN_SPRINTF_CHK
11307 && strcmp (fmt_str, target_percent_s) == 0)
11308 {
11309 tree arg;
11310
11311 if (nargs < 5)
11312 return;
11313 arg = CALL_EXPR_ARG (exp, 4);
11314 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11315 return;
11316
11317 len = c_strlen (arg, 1);
11318 if (!len || ! tree_fits_uhwi_p (len))
11319 return;
11320 }
11321 else
11322 return;
11323
11324 if (! tree_int_cst_lt (len, size))
11325 warning_at (tree_nonartificial_location (exp),
11326 0, "%Kcall to %D will always overflow destination buffer",
11327 exp, get_callee_fndecl (exp));
11328 }
11329
11330 /* Emit warning if a free is called with address of a variable. */
11331
11332 static void
11333 maybe_emit_free_warning (tree exp)
11334 {
11335 tree arg = CALL_EXPR_ARG (exp, 0);
11336
11337 STRIP_NOPS (arg);
11338 if (TREE_CODE (arg) != ADDR_EXPR)
11339 return;
11340
11341 arg = get_base_address (TREE_OPERAND (arg, 0));
11342 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11343 return;
11344
11345 if (SSA_VAR_P (arg))
11346 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11347 "%Kattempt to free a non-heap object %qD", exp, arg);
11348 else
11349 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11350 "%Kattempt to free a non-heap object", exp);
11351 }
11352
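/* EDITOR'S NOTE (hypothetical user code):

     int x;
     free (&x);

   The base address of the argument is the variable x, so this draws
   the -Wfree-nonheap-object warning above.  */
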
11353 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11354 if possible. */
11355
11356 static tree
11357 fold_builtin_object_size (tree ptr, tree ost)
11358 {
11359 unsigned HOST_WIDE_INT bytes;
11360 int object_size_type;
11361
11362 if (!validate_arg (ptr, POINTER_TYPE)
11363 || !validate_arg (ost, INTEGER_TYPE))
11364 return NULL_TREE;
11365
11366 STRIP_NOPS (ost);
11367
11368 if (TREE_CODE (ost) != INTEGER_CST
11369 || tree_int_cst_sgn (ost) < 0
11370 || compare_tree_int (ost, 3) > 0)
11371 return NULL_TREE;
11372
11373 object_size_type = tree_to_shwi (ost);
11374
11375 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11376 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11377 and (size_t) 0 for types 2 and 3. */
11378 if (TREE_SIDE_EFFECTS (ptr))
11379 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11380
11381 if (TREE_CODE (ptr) == ADDR_EXPR)
11382 {
11383 bytes = compute_builtin_object_size (ptr, object_size_type);
11384 if (wi::fits_to_tree_p (bytes, size_type_node))
11385 return build_int_cstu (size_type_node, bytes);
11386 }
11387 else if (TREE_CODE (ptr) == SSA_NAME)
11388 {
11389 /* If object size is not known yet, delay folding until
11390 later. Maybe subsequent passes will help determine
11391 it. */
11392 bytes = compute_builtin_object_size (ptr, object_size_type);
11393 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11394 && wi::fits_to_tree_p (bytes, size_type_node))
11395 return build_int_cstu (size_type_node, bytes);
11396 }
11397
11398 return NULL_TREE;
11399 }
11400
11401 /* Builtins with folding operations that operate on "..." arguments
11402 need special handling; we need to store the arguments in a convenient
11403 data structure before attempting any folding. Fortunately there are
11404 only a few builtins that fall into this category. FNDECL is the
11405 function, EXP is the CALL_EXPR for the call. */
11406
11407 static tree
11408 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11409 {
11410 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11411 tree ret = NULL_TREE;
11412
11413 switch (fcode)
11414 {
11415 case BUILT_IN_FPCLASSIFY:
11416 ret = fold_builtin_fpclassify (loc, args, nargs);
11417 break;
11418
11419 default:
11420 break;
11421 }
11422 if (ret)
11423 {
11424 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11425 SET_EXPR_LOCATION (ret, loc);
11426 TREE_NO_WARNING (ret) = 1;
11427 return ret;
11428 }
11429 return NULL_TREE;
11430 }
11431
11432 /* Initialize format string characters in the target charset. */
11433
11434 bool
11435 init_target_chars (void)
11436 {
11437 static bool init;
11438 if (!init)
11439 {
11440 target_newline = lang_hooks.to_target_charset ('\n');
11441 target_percent = lang_hooks.to_target_charset ('%');
11442 target_c = lang_hooks.to_target_charset ('c');
11443 target_s = lang_hooks.to_target_charset ('s');
11444 if (target_newline == 0 || target_percent == 0 || target_c == 0
11445 || target_s == 0)
11446 return false;
11447
11448 target_percent_c[0] = target_percent;
11449 target_percent_c[1] = target_c;
11450 target_percent_c[2] = '\0';
11451
11452 target_percent_s[0] = target_percent;
11453 target_percent_s[1] = target_s;
11454 target_percent_s[2] = '\0';
11455
11456 target_percent_s_newline[0] = target_percent;
11457 target_percent_s_newline[1] = target_s;
11458 target_percent_s_newline[2] = target_newline;
11459 target_percent_s_newline[3] = '\0';
11460
11461 init = true;
11462 }
11463 return true;
11464 }
11465
11466 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11467 and no overflow/underflow occurred. INEXACT is true if M was not
11468 exactly calculated. TYPE is the tree type for the result. This
11469 function assumes that you cleared the MPFR flags before calculating
11470 M, so that any flag set since then can be detected on entry to this
11471 function. Return NULL_TREE if any checks fail. */
11472
11473 static tree
11474 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11475 {
11476 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11477 overflow/underflow occurred. If -frounding-math, proceed iff the
11478 result of calling FUNC was exact. */
11479 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11480 && (!flag_rounding_math || !inexact))
11481 {
11482 REAL_VALUE_TYPE rr;
11483
11484 real_from_mpfr (&rr, m, type, GMP_RNDN);
11485 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11486 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11487 but the mpfr_t is not, then we underflowed in the
11488 conversion. */
11489 if (real_isfinite (&rr)
11490 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11491 {
11492 REAL_VALUE_TYPE rmode;
11493
11494 real_convert (&rmode, TYPE_MODE (type), &rr);
11495 /* Proceed iff the specified mode can hold the value. */
11496 if (real_identical (&rmode, &rr))
11497 return build_real (type, rmode);
11498 }
11499 }
11500 return NULL_TREE;
11501 }
11502
11503 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11504 number and no overflow/underflow occurred. INEXACT is true if M
11505 was not exactly calculated. TYPE is the tree type for the result.
11506 This function assumes that you cleared the MPFR flags before
11507 calculating M, so that any flag set since then can be detected on
11508 entry to this function. Return NULL_TREE if any checks fail; if
11509 FORCE_CONVERT is true, bypass the checks. */
11510
11511 static tree
11512 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11513 {
11514 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11515 overflow/underflow occurred. If -frounding-math, proceed iff the
11516 result of calling FUNC was exact. */
11517 if (force_convert
11518 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11519 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11520 && (!flag_rounding_math || !inexact)))
11521 {
11522 REAL_VALUE_TYPE re, im;
11523
11524 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11525 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11526 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11527 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11528 but the mpfr_t is not, then we underflowed in the
11529 conversion. */
11530 if (force_convert
11531 || (real_isfinite (&re) && real_isfinite (&im)
11532 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11533 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11534 {
11535 REAL_VALUE_TYPE re_mode, im_mode;
11536
11537 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11538 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11539 /* Proceed iff the specified mode can hold the value. */
11540 if (force_convert
11541 || (real_identical (&re_mode, &re)
11542 && real_identical (&im_mode, &im)))
11543 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11544 build_real (TREE_TYPE (type), im_mode));
11545 }
11546 }
11547 return NULL_TREE;
11548 }
11549
11550 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11551 FUNC on it and return the resulting value as a tree with type TYPE.
11552 If MIN and/or MAX are not NULL, then the supplied ARG must be
11553 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11554 acceptable values, otherwise they are not. The mpfr precision is
11555 set to the precision of TYPE. We assume that function FUNC returns
11556 zero if the result could be calculated exactly within the requested
11557 precision. */
11558
11559 static tree
11560 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11561 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11562 bool inclusive)
11563 {
11564 tree result = NULL_TREE;
11565
11566 STRIP_NOPS (arg);
11567
11568 /* To proceed, MPFR must exactly represent the target floating point
11569 format, which only happens when the target base equals two. */
11570 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11571 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11572 {
11573 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11574
11575 if (real_isfinite (ra)
11576 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11577 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11578 {
11579 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11580 const int prec = fmt->p;
11581 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11582 int inexact;
11583 mpfr_t m;
11584
11585 mpfr_init2 (m, prec);
11586 mpfr_from_real (m, ra, GMP_RNDN);
11587 mpfr_clear_flags ();
11588 inexact = func (m, m, rnd);
11589 result = do_mpfr_ckconv (m, type, inexact);
11590 mpfr_clear (m);
11591 }
11592 }
11593
11594 return result;
11595 }
11596
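/* EDITOR'S NOTE (usage sketch, mirroring how the folders elsewhere in
   this file call the helper): a domain-restricted function passes its
   bounds, e.g. acos with arguments limited to [-1, 1] inclusive:

     do_mpfr_arg1 (arg, type, mpfr_acos, &dconstm1, &dconst1, true);

   while an unbounded function passes NULL for both limits:

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);  */
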
11597 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11598 FUNC on it and return the resulting value as a tree with type TYPE.
11599 The mpfr precision is set to the precision of TYPE. We assume that
11600 function FUNC returns zero if the result could be calculated
11601 exactly within the requested precision. */
11602
11603 static tree
11604 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11605 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11606 {
11607 tree result = NULL_TREE;
11608
11609 STRIP_NOPS (arg1);
11610 STRIP_NOPS (arg2);
11611
11612 /* To proceed, MPFR must exactly represent the target floating point
11613 format, which only happens when the target base equals two. */
11614 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11615 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11616 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11617 {
11618 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11619 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11620
11621 if (real_isfinite (ra1) && real_isfinite (ra2))
11622 {
11623 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11624 const int prec = fmt->p;
11625 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11626 int inexact;
11627 mpfr_t m1, m2;
11628
11629 mpfr_inits2 (prec, m1, m2, NULL);
11630 mpfr_from_real (m1, ra1, GMP_RNDN);
11631 mpfr_from_real (m2, ra2, GMP_RNDN);
11632 mpfr_clear_flags ();
11633 inexact = func (m1, m1, m2, rnd);
11634 result = do_mpfr_ckconv (m1, type, inexact);
11635 mpfr_clears (m1, m2, NULL);
11636 }
11637 }
11638
11639 return result;
11640 }
11641
11642 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11643 FUNC on it and return the resulting value as a tree with type TYPE.
11644 The mpfr precision is set to the precision of TYPE. We assume that
11645 function FUNC returns zero if the result could be calculated
11646 exactly within the requested precision. */
11647
11648 static tree
11649 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11650 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11651 {
11652 tree result = NULL_TREE;
11653
11654 STRIP_NOPS (arg1);
11655 STRIP_NOPS (arg2);
11656 STRIP_NOPS (arg3);
11657
11658 /* To proceed, MPFR must exactly represent the target floating point
11659 format, which only happens when the target base equals two. */
11660 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11661 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11662 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11663 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11664 {
11665 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11666 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11667 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11668
11669 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11670 {
11671 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11672 const int prec = fmt->p;
11673 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11674 int inexact;
11675 mpfr_t m1, m2, m3;
11676
11677 mpfr_inits2 (prec, m1, m2, m3, NULL);
11678 mpfr_from_real (m1, ra1, GMP_RNDN);
11679 mpfr_from_real (m2, ra2, GMP_RNDN);
11680 mpfr_from_real (m3, ra3, GMP_RNDN);
11681 mpfr_clear_flags ();
11682 inexact = func (m1, m1, m2, m3, rnd);
11683 result = do_mpfr_ckconv (m1, type, inexact);
11684 mpfr_clears (m1, m2, m3, NULL);
11685 }
11686 }
11687
11688 return result;
11689 }
11690
11691 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11692 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11693 If ARG_SINP and ARG_COSP are NULL then the result is returned
11694 as a complex value.
11695 The type is taken from the type of ARG and is used for setting the
11696 precision of the calculation and results. */
11697
11698 static tree
11699 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11700 {
11701 tree const type = TREE_TYPE (arg);
11702 tree result = NULL_TREE;
11703
11704 STRIP_NOPS (arg);
11705
11706 /* To proceed, MPFR must exactly represent the target floating point
11707 format, which only happens when the target base equals two. */
11708 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11709 && TREE_CODE (arg) == REAL_CST
11710 && !TREE_OVERFLOW (arg))
11711 {
11712 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11713
11714 if (real_isfinite (ra))
11715 {
11716 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11717 const int prec = fmt->p;
11718 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11719 tree result_s, result_c;
11720 int inexact;
11721 mpfr_t m, ms, mc;
11722
11723 mpfr_inits2 (prec, m, ms, mc, NULL);
11724 mpfr_from_real (m, ra, GMP_RNDN);
11725 mpfr_clear_flags ();
11726 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11727 result_s = do_mpfr_ckconv (ms, type, inexact);
11728 result_c = do_mpfr_ckconv (mc, type, inexact);
11729 mpfr_clears (m, ms, mc, NULL);
11730 if (result_s && result_c)
11731 {
11732 /* If we are to return the result as a complex value, do so. */
11733 if (!arg_sinp && !arg_cosp)
11734 return build_complex (build_complex_type (type),
11735 result_c, result_s);
11736
11737 /* Dereference the sin/cos pointer arguments. */
11738 arg_sinp = build_fold_indirect_ref (arg_sinp);
11739 arg_cosp = build_fold_indirect_ref (arg_cosp);
11740 /* Proceed iff valid pointer types were passed in. */
11741 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11742 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11743 {
11744 /* Set the values. */
11745 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11746 result_s);
11747 TREE_SIDE_EFFECTS (result_s) = 1;
11748 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11749 result_c);
11750 TREE_SIDE_EFFECTS (result_c) = 1;
11751 /* Combine the assignments into a compound expr. */
11752 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11753 result_s, result_c));
11754 }
11755 }
11756 }
11757 }
11758 return result;
11759 }
11760
11761 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11762 two-argument mpfr order N Bessel function FUNC on them and return
11763 the resulting value as a tree with type TYPE. The mpfr precision
11764 is set to the precision of TYPE. We assume that function FUNC
11765 returns zero if the result could be calculated exactly within the
11766 requested precision. */
11767 static tree
11768 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11769 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11770 const REAL_VALUE_TYPE *min, bool inclusive)
11771 {
11772 tree result = NULL_TREE;
11773
11774 STRIP_NOPS (arg1);
11775 STRIP_NOPS (arg2);
11776
11777 /* To proceed, MPFR must exactly represent the target floating point
11778 format, which only happens when the target base equals two. */
11779 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11780 && tree_fits_shwi_p (arg1)
11781 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11782 {
11783 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11784 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11785
11786 if (n == (long)n
11787 && real_isfinite (ra)
11788 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11789 {
11790 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11791 const int prec = fmt->p;
11792 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11793 int inexact;
11794 mpfr_t m;
11795
11796 mpfr_init2 (m, prec);
11797 mpfr_from_real (m, ra, GMP_RNDN);
11798 mpfr_clear_flags ();
11799 inexact = func (m, n, m, rnd);
11800 result = do_mpfr_ckconv (m, type, inexact);
11801 mpfr_clear (m);
11802 }
11803 }
11804
11805 return result;
11806 }
11807
11808 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11809 the pointer *(ARG_QUO) and return the result. The type is taken
11810 from the type of ARG0 and is used for setting the precision of the
11811 calculation and results. */
11812
11813 static tree
11814 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11815 {
11816 tree const type = TREE_TYPE (arg0);
11817 tree result = NULL_TREE;
11818
11819 STRIP_NOPS (arg0);
11820 STRIP_NOPS (arg1);
11821
11822 /* To proceed, MPFR must exactly represent the target floating point
11823 format, which only happens when the target base equals two. */
11824 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11825 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11826 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11827 {
11828 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11829 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11830
11831 if (real_isfinite (ra0) && real_isfinite (ra1))
11832 {
11833 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11834 const int prec = fmt->p;
11835 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11836 tree result_rem;
11837 long integer_quo;
11838 mpfr_t m0, m1;
11839
11840 mpfr_inits2 (prec, m0, m1, NULL);
11841 mpfr_from_real (m0, ra0, GMP_RNDN);
11842 mpfr_from_real (m1, ra1, GMP_RNDN);
11843 mpfr_clear_flags ();
11844 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11845 /* Remquo is independent of the rounding mode, so pass
11846 inexact=0 to do_mpfr_ckconv(). */
11847 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11848 mpfr_clears (m0, m1, NULL);
11849 if (result_rem)
11850 {
11851 /* MPFR calculates quo in the host's long so it may
11852 return more bits in quo than the target int can hold
11853 if sizeof(host long) > sizeof(target int). This can
11854 happen even for native compilers in LP64 mode. In
11855 these cases, reduce the quo value modulo the largest
11856 number that the target int can hold while leaving one
11857 bit for the sign. */
11858 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11859 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11860
11861 /* Dereference the quo pointer argument. */
11862 arg_quo = build_fold_indirect_ref (arg_quo);
11863 /* Proceed iff a valid pointer type was passed in. */
11864 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11865 {
11866 /* Set the value. */
11867 tree result_quo
11868 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11869 build_int_cst (TREE_TYPE (arg_quo),
11870 integer_quo));
11871 TREE_SIDE_EFFECTS (result_quo) = 1;
11872 /* Combine the quo assignment with the rem. */
11873 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11874 result_quo, result_rem));
11875 }
11876 }
11877 }
11878 }
11879 return result;
11880 }
11881
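/* EDITOR'S NOTE (worked example for the truncation above): on an LP64
   host targeting a 32-bit int, INT_TYPE_SIZE is 32 and the statement

     integer_quo %= (long) (1UL << 31);

   keeps only the low 31 bits of the quotient, leaving one bit of the
   target int free for the sign.  */
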
11882 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11883 resulting value as a tree with type TYPE. The mpfr precision is
11884 set to the precision of TYPE. We assume that this mpfr function
11885 returns zero if the result could be calculated exactly within the
11886 requested precision. In addition, the integer pointer represented
11887 by ARG_SG will be dereferenced and set to the appropriate signgam
11888 (-1,1) value. */
11889
11890 static tree
11891 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11892 {
11893 tree result = NULL_TREE;
11894
11895 STRIP_NOPS (arg);
11896
11897 /* To proceed, MPFR must exactly represent the target floating point
11898 format, which only happens when the target base equals two. Also
11899 verify ARG is a constant and that ARG_SG is an int pointer. */
11900 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11901 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11902 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11903 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11904 {
11905 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11906
11907 /* In addition to NaN and Inf, the argument cannot be zero or a
11908 negative integer. */
11909 if (real_isfinite (ra)
11910 && ra->cl != rvc_zero
11911 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11912 {
11913 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11914 const int prec = fmt->p;
11915 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11916 int inexact, sg;
11917 mpfr_t m;
11918 tree result_lg;
11919
11920 mpfr_init2 (m, prec);
11921 mpfr_from_real (m, ra, GMP_RNDN);
11922 mpfr_clear_flags ();
11923 inexact = mpfr_lgamma (m, &sg, m, rnd);
11924 result_lg = do_mpfr_ckconv (m, type, inexact);
11925 mpfr_clear (m);
11926 if (result_lg)
11927 {
11928 tree result_sg;
11929
11930 /* Dereference the arg_sg pointer argument. */
11931 arg_sg = build_fold_indirect_ref (arg_sg);
11932 /* Assign the signgam value into *arg_sg. */
11933 result_sg = fold_build2 (MODIFY_EXPR,
11934 TREE_TYPE (arg_sg), arg_sg,
11935 build_int_cst (TREE_TYPE (arg_sg), sg));
11936 TREE_SIDE_EFFECTS (result_sg) = 1;
11937 /* Combine the signgam assignment with the lgamma result. */
11938 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11939 result_sg, result_lg));
11940 }
11941 }
11942 }
11943
11944 return result;
11945 }
11946
11947 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11948 function FUNC on it and return the resulting value as a tree with
11949 type TYPE. The mpfr precision is set to the precision of TYPE. We
11950 assume that function FUNC returns zero if the result could be
11951 calculated exactly within the requested precision. */
11952
11953 static tree
11954 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11955 {
11956 tree result = NULL_TREE;
11957
11958 STRIP_NOPS (arg);
11959
11960 /* To proceed, MPFR must exactly represent the target floating point
11961 format, which only happens when the target base equals two. */
11962 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11964 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11965 {
11966 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11967 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11968
11969 if (real_isfinite (re) && real_isfinite (im))
11970 {
11971 const struct real_format *const fmt =
11972 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11973 const int prec = fmt->p;
11974 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11975 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11976 int inexact;
11977 mpc_t m;
11978
11979 mpc_init2 (m, prec);
11980 mpfr_from_real (mpc_realref (m), re, rnd);
11981 mpfr_from_real (mpc_imagref (m), im, rnd);
11982 mpfr_clear_flags ();
11983 inexact = func (m, m, crnd);
11984 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11985 mpc_clear (m);
11986 }
11987 }
11988
11989 return result;
11990 }
11991
11992 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11993 mpc function FUNC on it and return the resulting value as a tree
11994 with type TYPE. The mpfr precision is set to the precision of
11995 TYPE. We assume that function FUNC returns zero if the result
11996 could be calculated exactly within the requested precision. If
11997 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11998 in the arguments and/or results. */
11999
12000 tree
12001 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12002 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12003 {
12004 tree result = NULL_TREE;
12005
12006 STRIP_NOPS (arg0);
12007 STRIP_NOPS (arg1);
12008
12009 /* To proceed, MPFR must exactly represent the target floating point
12010 format, which only happens when the target base equals two. */
12011 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12012 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12013 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12014 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12015 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12016 {
12017 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12018 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12019 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12020 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12021
12022 if (do_nonfinite
12023 || (real_isfinite (re0) && real_isfinite (im0)
12024 && real_isfinite (re1) && real_isfinite (im1)))
12025 {
12026 const struct real_format *const fmt =
12027 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12028 const int prec = fmt->p;
12029 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12030 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12031 int inexact;
12032 mpc_t m0, m1;
12033
12034 mpc_init2 (m0, prec);
12035 mpc_init2 (m1, prec);
12036 mpfr_from_real (mpc_realref (m0), re0, rnd);
12037 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12038 mpfr_from_real (mpc_realref (m1), re1, rnd);
12039 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12040 mpfr_clear_flags ();
12041 inexact = func (m0, m0, m1, crnd);
12042 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12043 mpc_clear (m0);
12044 mpc_clear (m1);
12045 }
12046 }
12047
12048 return result;
12049 }
12050
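/* A sketch of how a two-argument caller might look (simplified from
   the kind of handling cpow receives; not a definitive quote): the
   DO_NONFINITE argument is naturally tied to -ffinite-math-only, e.g.

     case BUILT_IN_CPOW:
       if (validate_arg (arg0, COMPLEX_TYPE)
           && validate_arg (arg1, COMPLEX_TYPE))
         return do_mpc_arg2 (arg0, arg1, type,
                             /*do_nonfinite=*/ !flag_finite_math_only,
                             mpc_pow);
       break;

   so Inf/NaN constant arguments are folded only when the user has not
   promised finite-only math. */
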
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated. */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from the original call to
                 the expansion of the builtin. Otherwise things like
                 maybe_emit_chk_warning, which operate on the expansion
                 of a builtin, will use the wrong location information. */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

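/* A contrived example of the problem this wrapper avoids:

     __builtin_sqrt (4.0);

   used as a statement folds to the bare constant 2.0; replacing the
   call too early would draw a "statement with no effect" style
   warning, so callers fold through fold_call_stmt with IGNORE set
   instead. */
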
/* Look up the function that corresponds to DECL via
   builtin_decl_explicit and set ASMSPEC as its user assembler name.
   DECL must be a function decl that declares a builtin. */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

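/* A user-level declaration that would land here (sketch, assuming the
   usual asm-rename syntax):

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   arrives with ASMSPEC "__my_memcpy", so both the libfunc used for
   block moves and explicit memcpy calls are emitted against that name
   rather than plain "memcpy". */
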
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code. */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants. */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack. */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around. */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

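/* E.g., in

     if (__builtin_constant_p (n) && n < 8)
       ...

   the builtin call costs nothing once expanded, which is the property
   callers of is_simple_builtin rely on when estimating code cost. */
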
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin. */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
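
/* A hypothetical cost-model helper built on the predicate above
   (a sketch, not part of any actual pass):

     static bool
     call_is_cheap_p (gcall *call)
     {
       tree fndecl = gimple_call_fndecl (call);
       return fndecl && is_inexpensive_builtin (fndecl);
     }

   Direct calls to the builtins listed above then count as (nearly)
   free when sizing up code. */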