1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "predict.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "rtl.h"
32 #include "alias.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
36 #include "calls.h"
37 #include "varasm.h"
38 #include "tree-object-size.h"
39 #include "realmpfr.h"
40 #include "cfgrtl.h"
41 #include "internal-fn.h"
42 #include "flags.h"
43 #include "regs.h"
44 #include "except.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "emit-rtl.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "libfuncs.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
62 #include "tree-dfa.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
65 #include "builtins.h"
66 #include "asan.h"
67 #include "cilk.h"
68 #include "cgraph.h"
69 #include "tree-chkp.h"
70 #include "rtl-chkp.h"
71
72
73 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
74
75 struct target_builtins default_target_builtins;
76 #if SWITCHABLE_TARGET
77 struct target_builtins *this_target_builtins = &default_target_builtins;
78 #endif
79
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names[BUILT_IN_LAST]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names[(int) END_BUILTINS] =
86 {
87 #include "builtins.def"
88 };
89 #undef DEF_BUILTIN
90
91 /* Set up an array of builtin_info_type, making sure each element's decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
94
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
97
98 static rtx c_readstr (const char *, machine_mode);
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static void expand_errno_check (tree, rtx);
112 static rtx expand_builtin_mathfn (tree, rtx, rtx);
113 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
114 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
115 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
116 static rtx expand_builtin_interclass_mathfn (tree, rtx);
117 static rtx expand_builtin_sincos (tree);
118 static rtx expand_builtin_cexpi (tree, rtx);
119 static rtx expand_builtin_int_roundingfn (tree, rtx);
120 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
121 static rtx expand_builtin_next_arg (void);
122 static rtx expand_builtin_va_start (tree);
123 static rtx expand_builtin_va_end (tree);
124 static rtx expand_builtin_va_copy (tree);
125 static rtx expand_builtin_strcmp (tree, rtx);
126 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
127 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
128 static rtx expand_builtin_memcpy (tree, rtx);
129 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
130 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
131 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
132 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
133 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
134 machine_mode, int, tree);
135 static rtx expand_builtin_strcpy (tree, rtx);
136 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
137 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree, bool);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
156 static bool validate_arg (const_tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_cos (location_t, tree, tree, tree);
164 static tree fold_builtin_cosh (location_t, tree, tree, tree);
165 static tree fold_builtin_tan (tree, tree);
166 static tree fold_builtin_trunc (location_t, tree, tree);
167 static tree fold_builtin_floor (location_t, tree, tree);
168 static tree fold_builtin_ceil (location_t, tree, tree);
169 static tree fold_builtin_round (location_t, tree, tree);
170 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
171 static tree fold_builtin_bitop (tree, tree);
172 static tree fold_builtin_strchr (location_t, tree, tree, tree);
173 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
175 static tree fold_builtin_strcmp (location_t, tree, tree);
176 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
177 static tree fold_builtin_signbit (location_t, tree, tree);
178 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
179 static tree fold_builtin_isascii (location_t, tree);
180 static tree fold_builtin_toascii (location_t, tree);
181 static tree fold_builtin_isdigit (location_t, tree);
182 static tree fold_builtin_fabs (location_t, tree, tree);
183 static tree fold_builtin_abs (location_t, tree, tree);
184 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
185 enum tree_code);
186 static tree fold_builtin_0 (location_t, tree);
187 static tree fold_builtin_1 (location_t, tree, tree);
188 static tree fold_builtin_2 (location_t, tree, tree, tree);
189 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
190 static tree fold_builtin_varargs (location_t, tree, tree*, int);
191
192 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
193 static tree fold_builtin_strstr (location_t, tree, tree, tree);
194 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205
206 unsigned HOST_WIDE_INT target_newline;
207 unsigned HOST_WIDE_INT target_percent;
208 static unsigned HOST_WIDE_INT target_c;
209 static unsigned HOST_WIDE_INT target_s;
210 char target_percent_c[3];
211 char target_percent_s[3];
212 char target_percent_s_newline[4];
213 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
214 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
215 static tree do_mpfr_arg2 (tree, tree, tree,
216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
217 static tree do_mpfr_arg3 (tree, tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_sincos (tree, tree, tree);
220 static tree do_mpfr_bessel_n (tree, tree, tree,
221 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_remquo (tree, tree, tree);
224 static tree do_mpfr_lgamma_r (tree, tree, tree);
225 static void expand_builtin_sync_synchronize (void);
226
227 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names a Cilk runtime helper when Cilk Plus is enabled. */
228
229 static bool
230 is_builtin_name (const char *name)
231 {
232 if (strncmp (name, "__builtin_", 10) == 0)
233 return true;
234 if (strncmp (name, "__sync_", 7) == 0)
235 return true;
236 if (strncmp (name, "__atomic_", 9) == 0)
237 return true;
238 if (flag_cilkplus
239 && (!strcmp (name, "__cilkrts_detach")
240 || !strcmp (name, "__cilkrts_pop_frame")))
241 return true;
242 return false;
243 }
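
/* For example, under the rules above (illustrative only):

     is_builtin_name ("__builtin_memcpy")        => true
     is_builtin_name ("__sync_fetch_and_add_4")  => true
     is_builtin_name ("__atomic_load_8")         => true
     is_builtin_name ("memcpy")                  => false

   The Cilk entry points are matched only when flag_cilkplus is set.  */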
244
245
246 /* Return true if DECL is a function symbol representing a built-in. */
247
248 bool
249 is_builtin_fn (tree decl)
250 {
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 }
253
254 /* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
257
258 static bool
259 called_as_built_in (tree node)
260 {
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
266 }
267
268 /* Compute values M and N such that M divides (address of EXP - N) and such
269 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
270 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
271 *ALIGNP and any bit-offset to *BITPOSP.
272
273 Note that the address (and thus the alignment) computed here is based
274 on the address to which a symbol resolves, whereas DECL_ALIGN is based
275 on the address at which an object is actually located. These two
276 addresses are not always the same. For example, on ARM targets,
277 the address &foo of a Thumb function foo() has the lowest bit set,
278 whereas foo() itself starts on an even address.
279
280 If ADDR_P is true we are taking the address of the memory reference EXP
281 and thus cannot rely on the access taking place. */
282
283 static bool
284 get_object_alignment_2 (tree exp, unsigned int *alignp,
285 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
286 {
287 HOST_WIDE_INT bitsize, bitpos;
288 tree offset;
289 machine_mode mode;
290 int unsignedp, volatilep;
291 unsigned int align = BITS_PER_UNIT;
292 bool known_alignment = false;
293
294 /* Get the innermost object and the constant (bitpos) and possibly
295 variable (offset) offset of the access. */
296 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
297 &mode, &unsignedp, &volatilep, true);
298
299 /* Extract alignment information from the innermost object and
300 possibly adjust bitpos and offset. */
301 if (TREE_CODE (exp) == FUNCTION_DECL)
302 {
303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be at least 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
308 align = 2 * BITS_PER_UNIT;
309 }
310 else if (TREE_CODE (exp) == LABEL_DECL)
311 ;
312 else if (TREE_CODE (exp) == CONST_DECL)
313 {
314 /* The alignment of a CONST_DECL is determined by its initializer. */
315 exp = DECL_INITIAL (exp);
316 align = TYPE_ALIGN (TREE_TYPE (exp));
317 if (CONSTANT_CLASS_P (exp))
318 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
319
320 known_alignment = true;
321 }
322 else if (DECL_P (exp))
323 {
324 align = DECL_ALIGN (exp);
325 known_alignment = true;
326 }
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 {
329 align = TYPE_ALIGN (TREE_TYPE (exp));
330 }
331 else if (TREE_CODE (exp) == INDIRECT_REF
332 || TREE_CODE (exp) == MEM_REF
333 || TREE_CODE (exp) == TARGET_MEM_REF)
334 {
335 tree addr = TREE_OPERAND (exp, 0);
336 unsigned ptr_align;
337 unsigned HOST_WIDE_INT ptr_bitpos;
338 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
339
340 /* If the address is explicitly aligned, handle that. */
341 if (TREE_CODE (addr) == BIT_AND_EXPR
342 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
343 {
344 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
345 ptr_bitmask *= BITS_PER_UNIT;
346 align = ptr_bitmask & -ptr_bitmask;
347 addr = TREE_OPERAND (addr, 0);
348 }
349
350 known_alignment
351 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
352 align = MAX (ptr_align, align);
353
354 /* Re-apply explicit alignment to the bitpos. */
355 ptr_bitpos &= ptr_bitmask;
356
357 /* The alignment of the pointer operand in a TARGET_MEM_REF
358 has to take the variable offset parts into account. */
359 if (TREE_CODE (exp) == TARGET_MEM_REF)
360 {
361 if (TMR_INDEX (exp))
362 {
363 unsigned HOST_WIDE_INT step = 1;
364 if (TMR_STEP (exp))
365 step = TREE_INT_CST_LOW (TMR_STEP (exp));
366 align = MIN (align, (step & -step) * BITS_PER_UNIT);
367 }
368 if (TMR_INDEX2 (exp))
369 align = BITS_PER_UNIT;
370 known_alignment = false;
371 }
372
373 /* When EXP is an actual memory reference then we can use
374 TYPE_ALIGN of a pointer indirection to derive alignment.
375 Do so only if get_pointer_alignment_1 did not reveal absolute
376 alignment knowledge and if using that alignment would
377 improve the situation. */
378 if (!addr_p && !known_alignment
379 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
380 align = TYPE_ALIGN (TREE_TYPE (exp));
381 else
382 {
383 /* Else adjust bitpos accordingly. */
384 bitpos += ptr_bitpos;
385 if (TREE_CODE (exp) == MEM_REF
386 || TREE_CODE (exp) == TARGET_MEM_REF)
387 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
388 }
389 }
390 else if (TREE_CODE (exp) == STRING_CST)
391 {
392 /* STRING_CSTs are the only constant objects we allow not to be
393 wrapped inside a CONST_DECL. */
394 align = TYPE_ALIGN (TREE_TYPE (exp));
395 if (CONSTANT_CLASS_P (exp))
396 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
397
398 known_alignment = true;
399 }
400
401 /* If there is a non-constant offset part extract the maximum
402 alignment that can prevail. */
403 if (offset)
404 {
405 unsigned int trailing_zeros = tree_ctz (offset);
406 if (trailing_zeros < HOST_BITS_PER_INT)
407 {
408 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
409 if (inner)
410 align = MIN (align, inner);
411 }
412 }
413
414 *alignp = align;
415 *bitposp = bitpos & (*alignp - 1);
416 return known_alignment;
417 }
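
/* A worked instance of the M/N contract above: for an access to a field
   at byte offset 4 within a decl known to be 16-byte aligned, the
   address minus 32 bits is divisible by 128 bits, so *ALIGNP is set
   to 128 and *BITPOSP to 32, and true is returned.  */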
418
419 /* For a memory reference expression EXP compute values M and N such that M
420 divides (&EXP - N) and such that N < M. If these numbers can be determined,
421 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
422 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
423
424 bool
425 get_object_alignment_1 (tree exp, unsigned int *alignp,
426 unsigned HOST_WIDE_INT *bitposp)
427 {
428 return get_object_alignment_2 (exp, alignp, bitposp, false);
429 }
430
431 /* Return the alignment in bits of EXP, an object. */
432
433 unsigned int
434 get_object_alignment (tree exp)
435 {
436 unsigned HOST_WIDE_INT bitpos = 0;
437 unsigned int align;
438
439 get_object_alignment_1 (exp, &align, &bitpos);
440
441 /* align and bitpos now specify known low bits of the pointer.
442 ptr & (align - 1) == bitpos. */
443
444 if (bitpos != 0)
445 align = (bitpos & -bitpos);
446 return align;
447 }
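
/* Note how the (align, bitpos) pair collapses to a single alignment:
   if get_object_alignment_1 reports align == 128 and bitpos == 32, the
   address has the form 128*k + 32 bits, and the largest power of two
   dividing every such value is bitpos & -bitpos == 32 bits, i.e. the
   object is only guaranteed to be 4-byte aligned.  */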
448
449 /* For a pointer valued expression EXP compute values M and N such that M
450 divides (EXP - N) and such that N < M. If these numbers can be determined,
451 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
452 the results are just a conservative approximation.
453
454 If EXP is not a pointer, false is returned too. */
455
456 bool
457 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
458 unsigned HOST_WIDE_INT *bitposp)
459 {
460 STRIP_NOPS (exp);
461
462 if (TREE_CODE (exp) == ADDR_EXPR)
463 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
464 alignp, bitposp, true);
465 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
466 {
467 unsigned int align;
468 unsigned HOST_WIDE_INT bitpos;
469 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
470 &align, &bitpos);
471 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
472 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
473 else
474 {
475 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
476 if (trailing_zeros < HOST_BITS_PER_INT)
477 {
478 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
479 if (inner)
480 align = MIN (align, inner);
481 }
482 }
483 *alignp = align;
484 *bitposp = bitpos & (align - 1);
485 return res;
486 }
487 else if (TREE_CODE (exp) == SSA_NAME
488 && POINTER_TYPE_P (TREE_TYPE (exp)))
489 {
490 unsigned int ptr_align, ptr_misalign;
491 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
492
493 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
494 {
495 *bitposp = ptr_misalign * BITS_PER_UNIT;
496 *alignp = ptr_align * BITS_PER_UNIT;
497 /* We cannot really tell whether this result is an approximation. */
498 return true;
499 }
500 else
501 {
502 *bitposp = 0;
503 *alignp = BITS_PER_UNIT;
504 return false;
505 }
506 }
507 else if (TREE_CODE (exp) == INTEGER_CST)
508 {
509 *alignp = BIGGEST_ALIGNMENT;
510 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
511 & (BIGGEST_ALIGNMENT - 1));
512 return true;
513 }
514
515 *bitposp = 0;
516 *alignp = BITS_PER_UNIT;
517 return false;
518 }
519
520 /* Return the alignment in bits of EXP, a pointer valued expression.
521 The alignment returned is, by default, the alignment of the thing that
522 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
523
524 Otherwise, look at the expression to see if we can do better, i.e., if the
525 expression is actually pointing at an object whose alignment is tighter. */
526
527 unsigned int
528 get_pointer_alignment (tree exp)
529 {
530 unsigned HOST_WIDE_INT bitpos = 0;
531 unsigned int align;
532
533 get_pointer_alignment_1 (exp, &align, &bitpos);
534
535 /* align and bitpos now specify known low bits of the pointer.
536 ptr & (align - 1) == bitpos. */
537
538 if (bitpos != 0)
539 align = (bitpos & -bitpos);
540
541 return align;
542 }
543
544 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
545 way, because it could contain a zero byte in the middle.
546 TREE_STRING_LENGTH is the size of the character array, not the string.
547
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
554
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
558
559 The value returned is of type `ssizetype'.
560
561 Unfortunately, string_constant can't access the values of const char
562 arrays with initializers, so neither can we do so here. */
563
564 tree
565 c_strlen (tree src, int only_value)
566 {
567 tree offset_node;
568 HOST_WIDE_INT offset;
569 int max;
570 const char *ptr;
571 location_t loc;
572
573 STRIP_NOPS (src);
574 if (TREE_CODE (src) == COND_EXPR
575 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
576 {
577 tree len1, len2;
578
579 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
580 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
581 if (tree_int_cst_equal (len1, len2))
582 return len1;
583 }
584
585 if (TREE_CODE (src) == COMPOUND_EXPR
586 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
587 return c_strlen (TREE_OPERAND (src, 1), only_value);
588
589 loc = EXPR_LOC_OR_LOC (src, input_location);
590
591 src = string_constant (src, &offset_node);
592 if (src == 0)
593 return NULL_TREE;
594
595 max = TREE_STRING_LENGTH (src) - 1;
596 ptr = TREE_STRING_POINTER (src);
597
598 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
599 {
600 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
601 compute the offset to the following null if we don't know where to
602 start searching for it. */
603 int i;
604
605 for (i = 0; i < max; i++)
606 if (ptr[i] == 0)
607 return NULL_TREE;
608
609 /* We don't know the starting offset, but we do know that the string
610 has no internal zero bytes. We can assume that the offset falls
611 within the bounds of the string; otherwise, the programmer deserves
612 what he gets. Subtract the offset from the length of the string,
613 and return that. This would perhaps not be valid if we were dealing
614 with named arrays in addition to literal string constants. */
615
616 return size_diffop_loc (loc, size_int (max), offset_node);
617 }
618
619 /* We have a known offset into the string. Start searching there for
620 a null character if we can represent it as a single HOST_WIDE_INT. */
621 if (offset_node == 0)
622 offset = 0;
623 else if (! tree_fits_shwi_p (offset_node))
624 offset = -1;
625 else
626 offset = tree_to_shwi (offset_node);
627
628 /* If the offset is known to be out of bounds, warn, and call strlen at
629 runtime. */
630 if (offset < 0 || offset > max)
631 {
632 /* Suppress multiple warnings for propagated constant strings. */
633 if (only_value != 2
634 && !TREE_NO_WARNING (src))
635 {
636 warning_at (loc, 0, "offset outside bounds of constant string");
637 TREE_NO_WARNING (src) = 1;
638 }
639 return NULL_TREE;
640 }
641
642 /* Use strlen to search for the first zero byte. Since any strings
643 constructed with build_string will have nulls appended, we win even
644 if we get handed something like (char[4])"abcd".
645
646 Since OFFSET is our starting index into the string, no further
647 calculation is needed. */
648 return ssize_int (strlen (ptr + offset));
649 }
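
/* Illustrative results, writing the string trees as literals and a
   variable offset as i:

     c_strlen ("hello", 0)         => 5
     c_strlen ("hello" + i, 0)     => 5 - i   (no internal NUL, so safe)
     c_strlen ("foo\0bar" + i, 0)  => NULL_TREE (internal NUL, unknown start)
     c_strlen ("hello" + 7, 0)     => NULL_TREE, after warning (out of bounds)  */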
650
651 /* Return a char pointer for a C string if it is a string constant
652 or sum of string constant and integer constant. */
653
654 const char *
655 c_getstr (tree src)
656 {
657 tree offset_node;
658
659 src = string_constant (src, &offset_node);
660 if (src == 0)
661 return 0;
662
663 if (offset_node == 0)
664 return TREE_STRING_POINTER (src);
665 else if (!tree_fits_uhwi_p (offset_node)
666 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
667 return 0;
668
669 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
670 }
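
/* For instance, c_getstr on the tree for "hello" + 2 yields the host
   pointer to "llo", while a variable or out-of-range offset yields 0.  */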
671
672 /* Return a constant integer corresponding to target reading
673 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
674
675 static rtx
676 c_readstr (const char *str, machine_mode mode)
677 {
678 HOST_WIDE_INT ch;
679 unsigned int i, j;
680 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
681
682 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
683 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
684 / HOST_BITS_PER_WIDE_INT;
685
686 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
687 for (i = 0; i < len; i++)
688 tmp[i] = 0;
689
690 ch = 1;
691 for (i = 0; i < GET_MODE_SIZE (mode); i++)
692 {
693 j = i;
694 if (WORDS_BIG_ENDIAN)
695 j = GET_MODE_SIZE (mode) - i - 1;
696 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
697 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
698 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
699 j *= BITS_PER_UNIT;
700
701 if (ch)
702 ch = (unsigned char) str[i];
703 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
704 }
705
706 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
707 return immed_wide_int_const (c, mode);
708 }
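
/* A sketch of the byte placement, assuming SImode (4 bytes) and the
   string "ab", so the bytes read are 0x61, 0x62 and then NUL padding:

     little endian:  0x00006261   ('a' in the least significant byte)
     big endian:     0x61620000   ('a' in the most significant byte)

   Once a NUL is seen, CH stays zero and all remaining bytes are 0.  */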
709
710 /* Cast a target constant CST to target CHAR and, if that value fits into
711 the host char type, return zero and store that value in the variable
712 pointed to by P. */
713
714 static int
715 target_char_cast (tree cst, char *p)
716 {
717 unsigned HOST_WIDE_INT val, hostval;
718
719 if (TREE_CODE (cst) != INTEGER_CST
720 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
721 return 1;
722
723 /* Do not care if it fits or not right here. */
724 val = TREE_INT_CST_LOW (cst);
725
726 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
727 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
728
729 hostval = val;
730 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
731 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
732
733 if (val != hostval)
734 return 1;
735
736 *p = hostval;
737 return 0;
738 }
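
/* For example, target_char_cast on the constant 0x141 with 8-bit target
   chars first truncates to the target char 0x41 ('A'); that fits the
   host char, so *P becomes 0x41 and 0 is returned.  The failure return
   of 1 is reserved for non-INTEGER_CST arguments and for wide target
   char values that do not fit in a host char.  */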
739
740 /* Similar to save_expr, but assumes that arbitrary code is not executed
741 in between the multiple evaluations. In particular, we assume that a
742 non-addressable local variable will not be modified. */
743
744 static tree
745 builtin_save_expr (tree exp)
746 {
747 if (TREE_CODE (exp) == SSA_NAME
748 || (TREE_ADDRESSABLE (exp) == 0
749 && (TREE_CODE (exp) == PARM_DECL
750 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
751 return exp;
752
753 return save_expr (exp);
754 }
755
756 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
757 times to get the address of either a higher stack frame, or a return
758 address located within it (depending on FNDECL_CODE). */
759
760 static rtx
761 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
762 {
763 int i;
764
765 #ifdef INITIAL_FRAME_ADDRESS_RTX
766 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
767 #else
768 rtx tem;
769
770 /* For a zero count with __builtin_return_address, we don't care what
771 frame address we return, because target-specific definitions will
772 override us. Therefore frame pointer elimination is OK, and using
773 the soft frame pointer is OK.
774
775 For a nonzero count, or a zero count with __builtin_frame_address,
776 we require a stable offset from the current frame pointer to the
777 previous one, so we must use the hard frame pointer, and
778 we must disable frame pointer elimination. */
779 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
780 tem = frame_pointer_rtx;
781 else
782 {
783 tem = hard_frame_pointer_rtx;
784
785 /* Tell reload not to eliminate the frame pointer. */
786 crtl->accesses_prior_frames = 1;
787 }
788 #endif
789
790 /* Some machines need special handling before we can access
791 arbitrary frames. For example, on the SPARC, we must first flush
792 all register windows to the stack. */
793 #ifdef SETUP_FRAME_ADDRESSES
794 if (count > 0)
795 SETUP_FRAME_ADDRESSES ();
796 #endif
797
798 /* On the SPARC, the return address is not in the frame, it is in a
799 register. There is no way to access it off of the current frame
800 pointer, but it can be accessed off the previous frame pointer by
801 reading the value from the register window save area. */
802 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
803 count--;
804
805 /* Scan back COUNT frames to the specified frame. */
806 for (i = 0; i < count; i++)
807 {
808 /* Assume the dynamic chain pointer is in the word that the
809 frame address points to, unless otherwise specified. */
810 #ifdef DYNAMIC_CHAIN_ADDRESS
811 tem = DYNAMIC_CHAIN_ADDRESS (tem);
812 #endif
813 tem = memory_address (Pmode, tem);
814 tem = gen_frame_mem (Pmode, tem);
815 tem = copy_to_reg (tem);
816 }
817
818 /* For __builtin_frame_address, return what we've got. But, on
819 the SPARC for example, we may have to add a bias. */
820 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
821 #ifdef FRAME_ADDR_RTX
822 return FRAME_ADDR_RTX (tem);
823 #else
824 return tem;
825 #endif
826
827 /* For __builtin_return_address, get the return address from that frame. */
828 #ifdef RETURN_ADDR_RTX
829 tem = RETURN_ADDR_RTX (count, tem);
830 #else
831 tem = memory_address (Pmode,
832 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
833 tem = gen_frame_mem (Pmode, tem);
834 #endif
835 return tem;
836 }
837
838 /* Alias set used for setjmp buffer. */
839 static alias_set_type setjmp_alias_set = -1;
840
841 /* Construct the leading half of a __builtin_setjmp call. Control will
842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
843 exception handling code. */
844
845 void
846 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
847 {
848 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
849 rtx stack_save;
850 rtx mem;
851
852 if (setjmp_alias_set == -1)
853 setjmp_alias_set = new_alias_set ();
854
855 buf_addr = convert_memory_address (Pmode, buf_addr);
856
857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
858
859 /* We store the frame pointer and the address of receiver_label in
860 the buffer and use the rest of it for the stack save area, which
861 is machine-dependent. */
862
863 mem = gen_rtx_MEM (Pmode, buf_addr);
864 set_mem_alias_set (mem, setjmp_alias_set);
865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
866
867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
868 GET_MODE_SIZE (Pmode)));
869 set_mem_alias_set (mem, setjmp_alias_set);
870
871 emit_move_insn (validize_mem (mem),
872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
873
874 stack_save = gen_rtx_MEM (sa_mode,
875 plus_constant (Pmode, buf_addr,
876 2 * GET_MODE_SIZE (Pmode)));
877 set_mem_alias_set (stack_save, setjmp_alias_set);
878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
879
880 /* If there is further processing to do, do it. */
881 if (targetm.have_builtin_setjmp_setup ())
882 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
883
884 /* We have a nonlocal label. */
885 cfun->has_nonlocal_label = 1;
886 }
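
/* To summarize the layout established above, the setjmp buffer holds:

     word 0     frame value (targetm.builtin_setjmp_frame_value)
     word 1     address of RECEIVER_LABEL
     word 2..   stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp below reads the words back at the same
   offsets.  */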
887
888 /* Construct the trailing part of a __builtin_setjmp call. This is
889 also called directly by the SJLJ exception handling code.
890 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
891
892 void
893 expand_builtin_setjmp_receiver (rtx receiver_label)
894 {
895 rtx chain;
896
897 /* Mark the FP as used when we get here, so we have to make sure it's
898 marked as used by this function. */
899 emit_use (hard_frame_pointer_rtx);
900
901 /* Mark the static chain as clobbered here so life information
902 doesn't get messed up for it. */
903 chain = targetm.calls.static_chain (current_function_decl, true);
904 if (chain && REG_P (chain))
905 emit_clobber (chain);
906
907 /* Now put in the code to restore the frame pointer, and argument
908 pointer, if needed. */
909 if (! targetm.have_nonlocal_goto ())
910 {
911 /* First adjust our frame pointer to its actual value. It was
912 previously set to the start of the virtual area corresponding to
913 the stacked variables when we branched here and now needs to be
914 adjusted to the actual hardware fp value.
915
916 Assignments to virtual registers are converted by
917 instantiate_virtual_regs into the corresponding assignment
918 to the underlying register (fp in this case) that makes
919 the original assignment true.
920 So the following insn will actually be decrementing fp by
921 STARTING_FRAME_OFFSET. */
922 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
923
924 /* Restoring the frame pointer also modifies the hard frame pointer.
925 Mark it used (so that the previous assignment remains live once
926 the frame pointer is eliminated) and clobbered (to represent the
927 implicit update from the assignment). */
928 emit_use (hard_frame_pointer_rtx);
929 emit_clobber (hard_frame_pointer_rtx);
930 }
931
932 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
933 if (fixed_regs[ARG_POINTER_REGNUM])
934 {
935 #ifdef ELIMINABLE_REGS
936 /* If the argument pointer can be eliminated in favor of the
937 frame pointer, we don't need to restore it. We assume here
938 that if such an elimination is present, it can always be used.
939 This is the case on all known machines; if we don't make this
940 assumption, we do unnecessary saving on many machines. */
941 size_t i;
942 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
943
944 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
945 if (elim_regs[i].from == ARG_POINTER_REGNUM
946 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
947 break;
948
949 if (i == ARRAY_SIZE (elim_regs))
950 #endif
951 {
952 /* Now restore our arg pointer from the address at which it
953 was saved in our stack frame. */
954 emit_move_insn (crtl->args.internal_arg_pointer,
955 copy_to_reg (get_arg_pointer_save_area ()));
956 }
957 }
958 #endif
959
960 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
961 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
962 else if (targetm.have_nonlocal_goto_receiver ())
963 emit_insn (targetm.gen_nonlocal_goto_receiver ());
964 else
965 { /* Nothing */ }
966
967 /* We must not allow the code we just generated to be reordered by
968 scheduling. Specifically, the update of the frame pointer must
969 happen immediately, not later. */
970 emit_insn (gen_blockage ());
971 }
972
973 /* __builtin_longjmp is passed a pointer to an array of five words (not
974 all will be used on all machines). It operates similarly to the C
975 library function of the same name, but is more efficient. Much of
976 the code below is copied from the handling of non-local gotos. */
977
978 static void
979 expand_builtin_longjmp (rtx buf_addr, rtx value)
980 {
981 rtx fp, lab, stack;
982 rtx_insn *insn, *last;
983 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
984
985 /* DRAP is needed for stack realign if longjmp is expanded to current
986 function */
987 if (SUPPORTS_STACK_ALIGNMENT)
988 crtl->need_drap = true;
989
990 if (setjmp_alias_set == -1)
991 setjmp_alias_set = new_alias_set ();
992
993 buf_addr = convert_memory_address (Pmode, buf_addr);
994
995 buf_addr = force_reg (Pmode, buf_addr);
996
997 /* We require the user to pass a second argument of 1, because
998 that is what builtin_setjmp will return. */
999 gcc_assert (value == const1_rtx);
1000
1001 last = get_last_insn ();
1002 if (targetm.have_builtin_longjmp ())
1003 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1004 else
1005 {
1006 fp = gen_rtx_MEM (Pmode, buf_addr);
1007 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1008 GET_MODE_SIZE (Pmode)));
1009
1010 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1011 2 * GET_MODE_SIZE (Pmode)));
1012 set_mem_alias_set (fp, setjmp_alias_set);
1013 set_mem_alias_set (lab, setjmp_alias_set);
1014 set_mem_alias_set (stack, setjmp_alias_set);
1015
1016 /* Pick up FP, label, and SP from the block and jump. This code is
1017 from expand_goto in stmt.c; see there for detailed comments. */
1018 if (targetm.have_nonlocal_goto ())
1019 /* We have to pass a value to the nonlocal_goto pattern that will
1020 get copied into the static_chain pointer, but it does not matter
1021 what that value is, because builtin_setjmp does not use it. */
1022 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1023 else
1024 {
1025 lab = copy_to_reg (lab);
1026
1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1029
1030 emit_move_insn (hard_frame_pointer_rtx, fp);
1031 emit_stack_restore (SAVE_NONLOCAL, stack);
1032
1033 emit_use (hard_frame_pointer_rtx);
1034 emit_use (stack_pointer_rtx);
1035 emit_indirect_jump (lab);
1036 }
1037 }
1038
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function. However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only. */
1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1045 {
1046 gcc_assert (insn != last);
1047
1048 if (JUMP_P (insn))
1049 {
1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1051 break;
1052 }
1053 else if (CALL_P (insn))
1054 break;
1055 }
1056 }
1057
1058 static inline bool
1059 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1060 {
1061 return (iter->i < iter->n);
1062 }
1063
1064 /* This function validates the types of a function call argument list
1065 against a specified list of tree_codes. If the last specifier is a 0,
1066 that represents an ellipsis; otherwise the last specifier must be a
1067 VOID_TYPE. */
1068
1069 static bool
1070 validate_arglist (const_tree callexpr, ...)
1071 {
1072 enum tree_code code;
1073 bool res = false;
1074 va_list ap;
1075 const_call_expr_arg_iterator iter;
1076 const_tree arg;
1077
1078 va_start (ap, callexpr);
1079 init_const_call_expr_arg_iterator (callexpr, &iter);
1080
1081 do
1082 {
1083 code = (enum tree_code) va_arg (ap, int);
1084 switch (code)
1085 {
1086 case 0:
1087 /* This signifies an ellipsis; any further arguments are all OK. */
1088 res = true;
1089 goto end;
1090 case VOID_TYPE:
1091 /* This signifies an endlink, if no arguments remain, return
1092 true, otherwise return false. */
1093 res = !more_const_call_expr_args_p (&iter);
1094 goto end;
1095 default:
1096 /* If no parameters remain or the parameter's code does not
1097 match the specified code, return false. Otherwise continue
1098 checking any remaining arguments. */
1099 arg = next_const_call_expr_arg (&iter);
1100 if (!validate_arg (arg, code))
1101 goto end;
1102 break;
1103 }
1104 }
1105 while (1);
1106
1107 /* We need gotos here since we can only have one VA_CLOSE in a
1108 function. */
1109 end: ;
1110 va_end (ap);
1111
1112 return res;
1113 }
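
/* A typical use, checking a memcpy-like signature (this mirrors the
   calls made throughout this file):

     if (!validate_arglist (exp,
                            POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                            VOID_TYPE))
       return NULL_RTX;

   Passing 0 instead of the trailing VOID_TYPE would instead accept any
   further arguments, as with a C ellipsis.  */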
1114
1115 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1116 and the address of the save area. */
1117
1118 static rtx
1119 expand_builtin_nonlocal_goto (tree exp)
1120 {
1121 tree t_label, t_save_area;
1122 rtx r_label, r_save_area, r_fp, r_sp;
1123 rtx_insn *insn;
1124
1125 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1126 return NULL_RTX;
1127
1128 t_label = CALL_EXPR_ARG (exp, 0);
1129 t_save_area = CALL_EXPR_ARG (exp, 1);
1130
1131 r_label = expand_normal (t_label);
1132 r_label = convert_memory_address (Pmode, r_label);
1133 r_save_area = expand_normal (t_save_area);
1134 r_save_area = convert_memory_address (Pmode, r_save_area);
1135 /* Copy the address of the save location to a register just in case it was
1136 based on the frame pointer. */
1137 r_save_area = copy_to_reg (r_save_area);
1138 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1139 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1140 plus_constant (Pmode, r_save_area,
1141 GET_MODE_SIZE (Pmode)));
1142
1143 crtl->has_nonlocal_goto = 1;
1144
1145 /* ??? We no longer need to pass the static chain value, afaik. */
1146 if (targetm.have_nonlocal_goto ())
1147 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1148 else
1149 {
1150 r_label = copy_to_reg (r_label);
1151
1152 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1153 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1154
1155 /* Restore frame pointer for containing function. */
1156 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1157 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1158
1159 /* USE of hard_frame_pointer_rtx added for consistency;
1160 not clear if really needed. */
1161 emit_use (hard_frame_pointer_rtx);
1162 emit_use (stack_pointer_rtx);
1163
1164 /* If the architecture is using a GP register, we must
1165 conservatively assume that the target function makes use of it.
1166 The prologue of functions with nonlocal gotos must therefore
1167 initialize the GP register to the appropriate value, and we
1168 must then make sure that this value is live at the point
1169 of the jump. (Note that this doesn't necessarily apply
1170 to targets with a nonlocal_goto pattern; they are free
1171 to implement it in their own way. Note also that this is
1172 a no-op if the GP register is a global invariant.) */
1173 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1174 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1175 emit_use (pic_offset_table_rtx);
1176
1177 emit_indirect_jump (r_label);
1178 }
1179
1180 /* Search backwards to the jump insn and mark it as a
1181 non-local goto. */
1182 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1183 {
1184 if (JUMP_P (insn))
1185 {
1186 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1187 break;
1188 }
1189 else if (CALL_P (insn))
1190 break;
1191 }
1192
1193 return const0_rtx;
1194 }
1195
1196 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1197 (not all will be used on all machines) that was passed to __builtin_setjmp.
1198 It updates the stack pointer in that block to the current value. This is
1199 also called directly by the SJLJ exception handling code. */
1200
1201 void
1202 expand_builtin_update_setjmp_buf (rtx buf_addr)
1203 {
1204 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1205 rtx stack_save
1206 = gen_rtx_MEM (sa_mode,
1207 memory_address
1208 (sa_mode,
1209 plus_constant (Pmode, buf_addr,
1210 2 * GET_MODE_SIZE (Pmode))));
1211
1212 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1213 }
1214
1215 /* Expand a call to __builtin_prefetch. For a target that does not support
1216 data prefetch, evaluate the memory address argument in case it has side
1217 effects. */
1218
1219 static void
1220 expand_builtin_prefetch (tree exp)
1221 {
1222 tree arg0, arg1, arg2;
1223 int nargs;
1224 rtx op0, op1, op2;
1225
1226 if (!validate_arglist (exp, POINTER_TYPE, 0))
1227 return;
1228
1229 arg0 = CALL_EXPR_ARG (exp, 0);
1230
1231 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1232 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1233 locality). */
1234 nargs = call_expr_nargs (exp);
1235 if (nargs > 1)
1236 arg1 = CALL_EXPR_ARG (exp, 1);
1237 else
1238 arg1 = integer_zero_node;
1239 if (nargs > 2)
1240 arg2 = CALL_EXPR_ARG (exp, 2);
1241 else
1242 arg2 = integer_three_node;
1243
1244 /* Argument 0 is an address. */
1245 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1246
1247 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1248 if (TREE_CODE (arg1) != INTEGER_CST)
1249 {
1250 error ("second argument to %<__builtin_prefetch%> must be a constant");
1251 arg1 = integer_zero_node;
1252 }
1253 op1 = expand_normal (arg1);
1254 /* Argument 1 must be either zero or one. */
1255 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1256 {
1257 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1258 " using zero");
1259 op1 = const0_rtx;
1260 }
1261
1262 /* Argument 2 (locality) must be a compile-time constant int. */
1263 if (TREE_CODE (arg2) != INTEGER_CST)
1264 {
1265 error ("third argument to %<__builtin_prefetch%> must be a constant");
1266 arg2 = integer_zero_node;
1267 }
1268 op2 = expand_normal (arg2);
1269 /* Argument 2 must be 0, 1, 2, or 3. */
1270 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1271 {
1272 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1273 op2 = const0_rtx;
1274 }
1275
1276 if (targetm.have_prefetch ())
1277 {
1278 struct expand_operand ops[3];
1279
1280 create_address_operand (&ops[0], op0);
1281 create_integer_operand (&ops[1], INTVAL (op1));
1282 create_integer_operand (&ops[2], INTVAL (op2));
1283 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1284 return;
1285 }
1286
1287 /* Don't do anything with direct references to volatile memory, but
1288 generate code to handle other side effects. */
1289 if (!MEM_P (op0) && side_effects_p (op0))
1290 emit_insn (op0);
1291 }
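
/* At the source level, the argument handling above corresponds to:

     __builtin_prefetch (p);          => read prefetch, locality 3
     __builtin_prefetch (p, 1, 1);    => write prefetch, low locality

   where the second and third arguments must be compile-time
   constants.  */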
1292
1293 /* Get a MEM rtx for expression EXP which is the address of an operand
1294 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1295 the maximum length of the block of memory that might be accessed or
1296 NULL if unknown. */
1297
1298 static rtx
1299 get_memory_rtx (tree exp, tree len)
1300 {
1301 tree orig_exp = exp;
1302 rtx addr, mem;
1303
1304 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1305 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1306 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1307 exp = TREE_OPERAND (exp, 0);
1308
1309 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1310 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1311
1312 /* Get an expression we can use to find the attributes to assign to MEM.
1313 First remove any nops. */
1314 while (CONVERT_EXPR_P (exp)
1315 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1316 exp = TREE_OPERAND (exp, 0);
1317
1318 /* Build a MEM_REF representing the whole accessed area as a byte blob
1319 (as builtin stringops may alias with anything). */
1320 exp = fold_build2 (MEM_REF,
1321 build_array_type (char_type_node,
1322 build_range_type (sizetype,
1323 size_one_node, len)),
1324 exp, build_int_cst (ptr_type_node, 0));
1325
1326 /* If the MEM_REF has no acceptable address, try to get the base object
1327 from the original address we got, and build an all-aliasing
1328 unknown-sized access to that one. */
1329 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1330 set_mem_attributes (mem, exp, 0);
1331 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1332 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1333 0))))
1334 {
1335 exp = build_fold_addr_expr (exp);
1336 exp = fold_build2 (MEM_REF,
1337 build_array_type (char_type_node,
1338 build_range_type (sizetype,
1339 size_zero_node,
1340 NULL)),
1341 exp, build_int_cst (ptr_type_node, 0));
1342 set_mem_attributes (mem, exp, 0);
1343 }
1344 set_mem_alias_set (mem, 0);
1345 return mem;
1346 }
1347 \f
1348 /* Built-in functions to perform an untyped call and return. */
1349
1350 #define apply_args_mode \
1351 (this_target_builtins->x_apply_args_mode)
1352 #define apply_result_mode \
1353 (this_target_builtins->x_apply_result_mode)
1354
1355 /* Return the size required for the block returned by __builtin_apply_args,
1356 and initialize apply_args_mode. */
1357
1358 static int
1359 apply_args_size (void)
1360 {
1361 static int size = -1;
1362 int align;
1363 unsigned int regno;
1364 machine_mode mode;
1365
1366 /* The values computed by this function never change. */
1367 if (size < 0)
1368 {
1369 /* The first value is the incoming arg-pointer. */
1370 size = GET_MODE_SIZE (Pmode);
1371
1372 /* The second value is the structure value address unless this is
1373 passed as an "invisible" first argument. */
1374 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1375 size += GET_MODE_SIZE (Pmode);
1376
1377 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1378 if (FUNCTION_ARG_REGNO_P (regno))
1379 {
1380 mode = targetm.calls.get_raw_arg_mode (regno);
1381
1382 gcc_assert (mode != VOIDmode);
1383
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
1387 size += GET_MODE_SIZE (mode);
1388 apply_args_mode[regno] = mode;
1389 }
1390 else
1391 {
1392 apply_args_mode[regno] = VOIDmode;
1393 }
1394 }
1395 return size;
1396 }
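
/* The rounding above keeps each register slot naturally aligned.  For
   example, with SIZE == 4 and an 8-byte DFmode argument register, SIZE
   is first rounded up to CEIL (4, 8) * 8 == 8, the slot occupies bytes
   8..15, and SIZE becomes 16.  */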
1397
1398 /* Return the size required for the block returned by __builtin_apply,
1399 and initialize apply_result_mode. */
1400
1401 static int
1402 apply_result_size (void)
1403 {
1404 static int size = -1;
1405 int align, regno;
1406 machine_mode mode;
1407
1408 /* The values computed by this function never change. */
1409 if (size < 0)
1410 {
1411 size = 0;
1412
1413 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1414 if (targetm.calls.function_value_regno_p (regno))
1415 {
1416 mode = targetm.calls.get_raw_result_mode (regno);
1417
1418 gcc_assert (mode != VOIDmode);
1419
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423 size += GET_MODE_SIZE (mode);
1424 apply_result_mode[regno] = mode;
1425 }
1426 else
1427 apply_result_mode[regno] = VOIDmode;
1428
1429 /* Allow targets that use untyped_call and untyped_return to override
1430 the size so that machine-specific information can be stored here. */
1431 #ifdef APPLY_RESULT_SIZE
1432 size = APPLY_RESULT_SIZE;
1433 #endif
1434 }
1435 return size;
1436 }
1437
1438 /* Create a vector describing the result block RESULT. If SAVEP is true,
1439 the result block is used to save the values; otherwise it is used to
1440 restore the values. */
1441
1442 static rtx
1443 result_vector (int savep, rtx result)
1444 {
1445 int regno, size, align, nelts;
1446 machine_mode mode;
1447 rtx reg, mem;
1448 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1449
1450 size = nelts = 0;
1451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1452 if ((mode = apply_result_mode[regno]) != VOIDmode)
1453 {
1454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1455 if (size % align != 0)
1456 size = CEIL (size, align) * align;
1457 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1458 mem = adjust_address (result, mode, size);
1459 savevec[nelts++] = (savep
1460 ? gen_rtx_SET (mem, reg)
1461 : gen_rtx_SET (reg, mem));
1462 size += GET_MODE_SIZE (mode);
1463 }
1464 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1465 }
1466
1467 /* Save the state required to perform an untyped call with the same
1468 arguments as were passed to the current function. */
1469
1470 static rtx
1471 expand_builtin_apply_args_1 (void)
1472 {
1473 rtx registers, tem;
1474 int size, align, regno;
1475 machine_mode mode;
1476 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1477
1478 /* Create a block where the arg-pointer, structure value address,
1479 and argument registers can be saved. */
1480 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1481
1482 /* Walk past the arg-pointer and structure value address. */
1483 size = GET_MODE_SIZE (Pmode);
1484 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1485 size += GET_MODE_SIZE (Pmode);
1486
1487 /* Save each register used in calling a function to the block. */
1488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1489 if ((mode = apply_args_mode[regno]) != VOIDmode)
1490 {
1491 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1492 if (size % align != 0)
1493 size = CEIL (size, align) * align;
1494
1495 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1496
1497 emit_move_insn (adjust_address (registers, mode, size), tem);
1498 size += GET_MODE_SIZE (mode);
1499 }
1500
1501 /* Save the arg pointer to the block. */
1502 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1503 /* We need the pointer as the caller actually passed it to us, not
1504 as we might have pretended it was passed. Make sure it's a valid
1505 operand, as emit_move_insn isn't expected to handle a PLUS. */
1506 if (STACK_GROWS_DOWNWARD)
1507 tem
1508 = force_operand (plus_constant (Pmode, tem,
1509 crtl->args.pretend_args_size),
1510 NULL_RTX);
1511 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1512
1513 size = GET_MODE_SIZE (Pmode);
1514
1515 /* Save the structure value address unless this is passed as an
1516 "invisible" first argument. */
1517 if (struct_incoming_value)
1518 {
1519 emit_move_insn (adjust_address (registers, Pmode, size),
1520 copy_to_reg (struct_incoming_value));
1521 size += GET_MODE_SIZE (Pmode);
1522 }
1523
1524 /* Return the address of the block. */
1525 return copy_addr_to_reg (XEXP (registers, 0));
1526 }
1527
1528 /* __builtin_apply_args returns a block of memory allocated on
1529 the stack into which is stored the arg pointer, structure
1530 value address, static chain, and all the registers that might
1531 possibly be used in performing a function call. The code is
1532 moved to the start of the function so the incoming values are
1533 saved. */
1534
1535 static rtx
1536 expand_builtin_apply_args (void)
1537 {
1538 /* Don't do __builtin_apply_args more than once in a function.
1539 Save the result of the first call and reuse it. */
1540 if (apply_args_value != 0)
1541 return apply_args_value;
1542 {
1543 /* When this function is called, it means that registers must be
1544 saved on entry to this function. So we migrate the
1545 call to the first insn of this function. */
1546 rtx temp;
1547
1548 start_sequence ();
1549 temp = expand_builtin_apply_args_1 ();
1550 rtx_insn *seq = get_insns ();
1551 end_sequence ();
1552
1553 apply_args_value = temp;
1554
1555 /* Put the insns after the NOTE that starts the function.
1556 If this is inside a start_sequence, make the outer-level insn
1557 chain current, so the code is placed at the start of the
1558 function. If internal_arg_pointer is a non-virtual pseudo,
1559 it needs to be placed after the function that initializes
1560 that pseudo. */
1561 push_topmost_sequence ();
1562 if (REG_P (crtl->args.internal_arg_pointer)
1563 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1564 emit_insn_before (seq, parm_birth_insn);
1565 else
1566 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1567 pop_topmost_sequence ();
1568 return temp;
1569 }
1570 }
1571
1572 /* Perform an untyped call and save the state required to perform an
1573 untyped return of whatever value was returned by the given function. */
1574
1575 static rtx
1576 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1577 {
1578 int size, align, regno;
1579 machine_mode mode;
1580 rtx incoming_args, result, reg, dest, src;
1581 rtx_call_insn *call_insn;
1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1585
1586 arguments = convert_memory_address (Pmode, arguments);
1587
1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1594 if (!STACK_GROWS_DOWNWARD)
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
1597
1598 /* Push a new argument block and copy the arguments. Do not allow
1599 the (potential) memcpy call below to interfere with our stack
1600 manipulations. */
1601 do_pending_stack_adjust ();
1602 NO_DEFER_POP;
1603
1604 /* Save the stack with nonlocal if available. */
1605 if (targetm.have_save_stack_nonlocal ())
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1609
1610 /* Allocate a block of memory onto the stack and copy the memory
1611 arguments to the outgoing arguments address. We can pass TRUE
1612 as the 4th argument because we just saved the stack pointer
1613 and will restore it right after the call. */
1614 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1615
1616 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1617 may have already set current_function_calls_alloca to true.
1618 current_function_calls_alloca won't be set if argsize is zero,
1619 so we have to guarantee need_drap is true here. */
1620 if (SUPPORTS_STACK_ALIGNMENT)
1621 crtl->need_drap = true;
1622
1623 dest = virtual_outgoing_args_rtx;
1624 if (!STACK_GROWS_DOWNWARD)
1625 {
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 }
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1636
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1641
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1646
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 {
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1659 }
1660
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1665 {
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1672 }
1673
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1676
1677 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1678 work is needed, and we don't want to load it into a register as an
1679 optimization, because prepare_call_address already did that if needed. */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1682
1683 /* Generate the actual call instruction and save the return value. */
1684 if (targetm.have_untyped_call ())
1685 {
1686 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1687 emit_call_insn (targetm.gen_untyped_call (mem, result,
1688 result_vector (1, result)));
1689 }
1690 else if (targetm.have_call_value ())
1691 {
1692 rtx valreg = 0;
1693
1694 /* Locate the unique return register. It is not possible to
1695 express a call that sets more than one return register using
1696 call_value; use untyped_call for that. In fact, untyped_call
1697 only needs to save the return registers in the given block. */
1698 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1699 if ((mode = apply_result_mode[regno]) != VOIDmode)
1700 {
1701 gcc_assert (!valreg); /* have_untyped_call required. */
1702
1703 valreg = gen_rtx_REG (mode, regno);
1704 }
1705
1706 emit_insn (targetm.gen_call_value (valreg,
1707 gen_rtx_MEM (FUNCTION_MODE, function),
1708 const0_rtx, NULL_RTX, const0_rtx));
1709
1710 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1711 }
1712 else
1713 gcc_unreachable ();
1714
1715 /* Find the CALL insn we just emitted, and attach the register usage
1716 information. */
1717 call_insn = last_call_insn ();
1718 add_function_usage_to (call_insn, call_fusage);
1719
1720 /* Restore the stack. */
1721 if (targetm.have_save_stack_nonlocal ())
1722 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1723 else
1724 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1725 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1726
1727 OK_DEFER_POP;
1728
1729 /* Return the address of the result block. */
1730 result = copy_addr_to_reg (XEXP (result, 0));
1731 return convert_memory_address (ptr_mode, result);
1732 }
1733
1734 /* Perform an untyped return. */
1735
1736 static void
1737 expand_builtin_return (rtx result)
1738 {
1739 int size, align, regno;
1740 machine_mode mode;
1741 rtx reg;
1742 rtx_insn *call_fusage = 0;
1743
1744 result = convert_memory_address (Pmode, result);
1745
1746 apply_result_size ();
1747 result = gen_rtx_MEM (BLKmode, result);
1748
1749 if (targetm.have_untyped_return ())
1750 {
1751 rtx vector = result_vector (0, result);
1752 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1753 emit_barrier ();
1754 return;
1755 }
1756
1757 /* Restore the return value and note that each value is used. */
1758 size = 0;
1759 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1760 if ((mode = apply_result_mode[regno]) != VOIDmode)
1761 {
1762 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1763 if (size % align != 0)
1764 size = CEIL (size, align) * align;
1765 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1766 emit_move_insn (reg, adjust_address (result, mode, size));
1767
1768 push_to_sequence (call_fusage);
1769 emit_use (reg);
1770 call_fusage = get_insns ();
1771 end_sequence ();
1772 size += GET_MODE_SIZE (mode);
1773 }
1774
1775 /* Put the USE insns before the return. */
1776 emit_insn (call_fusage);
1777
1778 /* Return whatever values were restored by jumping directly to the end
1779 of the function. */
1780 expand_naked_return ();
1781 }
1782
1783 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1784
1785 static enum type_class
1786 type_to_class (tree type)
1787 {
1788 switch (TREE_CODE (type))
1789 {
1790 case VOID_TYPE: return void_type_class;
1791 case INTEGER_TYPE: return integer_type_class;
1792 case ENUMERAL_TYPE: return enumeral_type_class;
1793 case BOOLEAN_TYPE: return boolean_type_class;
1794 case POINTER_TYPE: return pointer_type_class;
1795 case REFERENCE_TYPE: return reference_type_class;
1796 case OFFSET_TYPE: return offset_type_class;
1797 case REAL_TYPE: return real_type_class;
1798 case COMPLEX_TYPE: return complex_type_class;
1799 case FUNCTION_TYPE: return function_type_class;
1800 case METHOD_TYPE: return method_type_class;
1801 case RECORD_TYPE: return record_type_class;
1802 case UNION_TYPE:
1803 case QUAL_UNION_TYPE: return union_type_class;
1804 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1805 ? string_type_class : array_type_class);
1806 case LANG_TYPE: return lang_type_class;
1807 default: return no_type_class;
1808 }
1809 }
1810
1811 /* Expand a call EXP to __builtin_classify_type. */
1812
1813 static rtx
1814 expand_builtin_classify_type (tree exp)
1815 {
1816 if (call_expr_nargs (exp))
1817 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1818 return GEN_INT (no_type_class);
1819 }
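/* An illustrative sketch of the classification above (the enumeration
   values come from typeclass.h):

     __builtin_classify_type (1.0)    -> real_type_class
     __builtin_classify_type ("ab")   -> pointer_type_class, since the
                                         string argument decays to a pointer

   With no argument at all, no_type_class is returned.  */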
1820
1821 /* This helper macro, meant to be used in mathfn_built_in below,
1822 determines which among a set of three builtin math functions is
1823 appropriate for a given type mode. The `F' and `L' cases are
1824 automatically generated from the `double' case. */
1825 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1826 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1827 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1828 fcodel = BUILT_IN_MATHFN##L ; break;
1829 /* Similar to above, but appends _R after any F/L suffix. */
1830 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1832 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1833 fcodel = BUILT_IN_MATHFN##L_R ; break;
1834
1835 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1836 if available. If IMPLICIT is true use the implicit builtin declaration,
1837 otherwise use the explicit declaration. If we can't do the conversion,
1838 return zero. */
1839
1840 static tree
1841 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1842 {
1843 enum built_in_function fcode, fcodef, fcodel, fcode2;
1844
1845 switch (fn)
1846 {
1847 CASE_MATHFN (BUILT_IN_ACOS)
1848 CASE_MATHFN (BUILT_IN_ACOSH)
1849 CASE_MATHFN (BUILT_IN_ASIN)
1850 CASE_MATHFN (BUILT_IN_ASINH)
1851 CASE_MATHFN (BUILT_IN_ATAN)
1852 CASE_MATHFN (BUILT_IN_ATAN2)
1853 CASE_MATHFN (BUILT_IN_ATANH)
1854 CASE_MATHFN (BUILT_IN_CBRT)
1855 CASE_MATHFN (BUILT_IN_CEIL)
1856 CASE_MATHFN (BUILT_IN_CEXPI)
1857 CASE_MATHFN (BUILT_IN_COPYSIGN)
1858 CASE_MATHFN (BUILT_IN_COS)
1859 CASE_MATHFN (BUILT_IN_COSH)
1860 CASE_MATHFN (BUILT_IN_DREM)
1861 CASE_MATHFN (BUILT_IN_ERF)
1862 CASE_MATHFN (BUILT_IN_ERFC)
1863 CASE_MATHFN (BUILT_IN_EXP)
1864 CASE_MATHFN (BUILT_IN_EXP10)
1865 CASE_MATHFN (BUILT_IN_EXP2)
1866 CASE_MATHFN (BUILT_IN_EXPM1)
1867 CASE_MATHFN (BUILT_IN_FABS)
1868 CASE_MATHFN (BUILT_IN_FDIM)
1869 CASE_MATHFN (BUILT_IN_FLOOR)
1870 CASE_MATHFN (BUILT_IN_FMA)
1871 CASE_MATHFN (BUILT_IN_FMAX)
1872 CASE_MATHFN (BUILT_IN_FMIN)
1873 CASE_MATHFN (BUILT_IN_FMOD)
1874 CASE_MATHFN (BUILT_IN_FREXP)
1875 CASE_MATHFN (BUILT_IN_GAMMA)
1876 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1877 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1878 CASE_MATHFN (BUILT_IN_HYPOT)
1879 CASE_MATHFN (BUILT_IN_ILOGB)
1880 CASE_MATHFN (BUILT_IN_ICEIL)
1881 CASE_MATHFN (BUILT_IN_IFLOOR)
1882 CASE_MATHFN (BUILT_IN_INF)
1883 CASE_MATHFN (BUILT_IN_IRINT)
1884 CASE_MATHFN (BUILT_IN_IROUND)
1885 CASE_MATHFN (BUILT_IN_ISINF)
1886 CASE_MATHFN (BUILT_IN_J0)
1887 CASE_MATHFN (BUILT_IN_J1)
1888 CASE_MATHFN (BUILT_IN_JN)
1889 CASE_MATHFN (BUILT_IN_LCEIL)
1890 CASE_MATHFN (BUILT_IN_LDEXP)
1891 CASE_MATHFN (BUILT_IN_LFLOOR)
1892 CASE_MATHFN (BUILT_IN_LGAMMA)
1893 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1894 CASE_MATHFN (BUILT_IN_LLCEIL)
1895 CASE_MATHFN (BUILT_IN_LLFLOOR)
1896 CASE_MATHFN (BUILT_IN_LLRINT)
1897 CASE_MATHFN (BUILT_IN_LLROUND)
1898 CASE_MATHFN (BUILT_IN_LOG)
1899 CASE_MATHFN (BUILT_IN_LOG10)
1900 CASE_MATHFN (BUILT_IN_LOG1P)
1901 CASE_MATHFN (BUILT_IN_LOG2)
1902 CASE_MATHFN (BUILT_IN_LOGB)
1903 CASE_MATHFN (BUILT_IN_LRINT)
1904 CASE_MATHFN (BUILT_IN_LROUND)
1905 CASE_MATHFN (BUILT_IN_MODF)
1906 CASE_MATHFN (BUILT_IN_NAN)
1907 CASE_MATHFN (BUILT_IN_NANS)
1908 CASE_MATHFN (BUILT_IN_NEARBYINT)
1909 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1910 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1911 CASE_MATHFN (BUILT_IN_POW)
1912 CASE_MATHFN (BUILT_IN_POWI)
1913 CASE_MATHFN (BUILT_IN_POW10)
1914 CASE_MATHFN (BUILT_IN_REMAINDER)
1915 CASE_MATHFN (BUILT_IN_REMQUO)
1916 CASE_MATHFN (BUILT_IN_RINT)
1917 CASE_MATHFN (BUILT_IN_ROUND)
1918 CASE_MATHFN (BUILT_IN_SCALB)
1919 CASE_MATHFN (BUILT_IN_SCALBLN)
1920 CASE_MATHFN (BUILT_IN_SCALBN)
1921 CASE_MATHFN (BUILT_IN_SIGNBIT)
1922 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1923 CASE_MATHFN (BUILT_IN_SIN)
1924 CASE_MATHFN (BUILT_IN_SINCOS)
1925 CASE_MATHFN (BUILT_IN_SINH)
1926 CASE_MATHFN (BUILT_IN_SQRT)
1927 CASE_MATHFN (BUILT_IN_TAN)
1928 CASE_MATHFN (BUILT_IN_TANH)
1929 CASE_MATHFN (BUILT_IN_TGAMMA)
1930 CASE_MATHFN (BUILT_IN_TRUNC)
1931 CASE_MATHFN (BUILT_IN_Y0)
1932 CASE_MATHFN (BUILT_IN_Y1)
1933 CASE_MATHFN (BUILT_IN_YN)
1934
1935 default:
1936 return NULL_TREE;
1937 }
1938
1939 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1940 fcode2 = fcode;
1941 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1942 fcode2 = fcodef;
1943 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1944 fcode2 = fcodel;
1945 else
1946 return NULL_TREE;
1947
1948 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1949 return NULL_TREE;
1950
1951 return builtin_decl_explicit (fcode2);
1952 }
1953
1954 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1955
1956 tree
1957 mathfn_built_in (tree type, enum built_in_function fn)
1958 {
1959 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1960 }
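/* A sketch of the mapping implemented above: mathfn_built_in
   (float_type_node, BUILT_IN_SQRT) yields the implicit declaration of
   sqrtf, long_double_type_node yields sqrtl, and a type whose main
   variant is none of float, double or long double yields NULL_TREE.  */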
1961
1962 /* If errno must be maintained, expand the RTL to check if the result,
1963 TARGET, of a built-in function call, EXP, is NaN, and if so set
1964 errno to EDOM. */
1965
1966 static void
1967 expand_errno_check (tree exp, rtx target)
1968 {
1969 rtx_code_label *lab = gen_label_rtx ();
1970
1971 /* Test the result; if it is NaN, set errno=EDOM because
1972 the argument was not in the domain. */
1973 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1974 NULL_RTX, NULL, lab,
1975 /* The jump is very likely. */
1976 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
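/* Only a NaN compares unequal to itself, so the branch to LAB is taken
   for every non-NaN result; e.g. for result = sqrt (-1.0) the
   comparison is false and we fall through to set errno below.  */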
1977
1978 #ifdef TARGET_EDOM
1979 /* If this built-in doesn't throw an exception, set errno directly. */
1980 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1981 {
1982 #ifdef GEN_ERRNO_RTX
1983 rtx errno_rtx = GEN_ERRNO_RTX;
1984 #else
1985 rtx errno_rtx
1986 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1987 #endif
1988 emit_move_insn (errno_rtx,
1989 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1990 emit_label (lab);
1991 return;
1992 }
1993 #endif
1994
1995 /* Make sure the library call isn't expanded as a tail call. */
1996 CALL_EXPR_TAILCALL (exp) = 0;
1997
1998 /* We can't set errno=EDOM directly; let the library call do it.
1999 Pop the arguments right away in case the call gets deleted. */
2000 NO_DEFER_POP;
2001 expand_call (exp, target, 0);
2002 OK_DEFER_POP;
2003 emit_label (lab);
2004 }
2005
2006 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2007 Return NULL_RTX if a normal call should be emitted rather than expanding
2008 the function in-line. EXP is the expression that is a call to the builtin
2009 function; if convenient, the result should be placed in TARGET.
2010 SUBTARGET may be used as the target for computing one of EXP's operands. */
2011
2012 static rtx
2013 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2014 {
2015 optab builtin_optab;
2016 rtx op0;
2017 rtx_insn *insns;
2018 tree fndecl = get_callee_fndecl (exp);
2019 machine_mode mode;
2020 bool errno_set = false;
2021 bool try_widening = false;
2022 tree arg;
2023
2024 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2025 return NULL_RTX;
2026
2027 arg = CALL_EXPR_ARG (exp, 0);
2028
2029 switch (DECL_FUNCTION_CODE (fndecl))
2030 {
2031 CASE_FLT_FN (BUILT_IN_SQRT):
2032 errno_set = ! tree_expr_nonnegative_p (arg);
2033 try_widening = true;
2034 builtin_optab = sqrt_optab;
2035 break;
2036 CASE_FLT_FN (BUILT_IN_EXP):
2037 errno_set = true; builtin_optab = exp_optab; break;
2038 CASE_FLT_FN (BUILT_IN_EXP10):
2039 CASE_FLT_FN (BUILT_IN_POW10):
2040 errno_set = true; builtin_optab = exp10_optab; break;
2041 CASE_FLT_FN (BUILT_IN_EXP2):
2042 errno_set = true; builtin_optab = exp2_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXPM1):
2044 errno_set = true; builtin_optab = expm1_optab; break;
2045 CASE_FLT_FN (BUILT_IN_LOGB):
2046 errno_set = true; builtin_optab = logb_optab; break;
2047 CASE_FLT_FN (BUILT_IN_LOG):
2048 errno_set = true; builtin_optab = log_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOG10):
2050 errno_set = true; builtin_optab = log10_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG2):
2052 errno_set = true; builtin_optab = log2_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG1P):
2054 errno_set = true; builtin_optab = log1p_optab; break;
2055 CASE_FLT_FN (BUILT_IN_ASIN):
2056 builtin_optab = asin_optab; break;
2057 CASE_FLT_FN (BUILT_IN_ACOS):
2058 builtin_optab = acos_optab; break;
2059 CASE_FLT_FN (BUILT_IN_TAN):
2060 builtin_optab = tan_optab; break;
2061 CASE_FLT_FN (BUILT_IN_ATAN):
2062 builtin_optab = atan_optab; break;
2063 CASE_FLT_FN (BUILT_IN_FLOOR):
2064 builtin_optab = floor_optab; break;
2065 CASE_FLT_FN (BUILT_IN_CEIL):
2066 builtin_optab = ceil_optab; break;
2067 CASE_FLT_FN (BUILT_IN_TRUNC):
2068 builtin_optab = btrunc_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ROUND):
2070 builtin_optab = round_optab; break;
2071 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2072 builtin_optab = nearbyint_optab;
2073 if (flag_trapping_math)
2074 break;
2075 /* Else fall through and expand as rint. */
2076 CASE_FLT_FN (BUILT_IN_RINT):
2077 builtin_optab = rint_optab; break;
2078 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2079 builtin_optab = significand_optab; break;
2080 default:
2081 gcc_unreachable ();
2082 }
2083
2084 /* Make a suitable register to place result in. */
2085 mode = TYPE_MODE (TREE_TYPE (exp));
2086
2087 if (! flag_errno_math || ! HONOR_NANS (mode))
2088 errno_set = false;
2089
2090 /* Before working hard, check whether the instruction is available, but try
2091 to widen the mode for specific operations. */
2092 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2093 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2094 && (!errno_set || !optimize_insn_for_size_p ()))
2095 {
2096 rtx result = gen_reg_rtx (mode);
2097
2098 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2099 need to expand the argument again. This way, we will not perform
2100 side-effects more than once. */
2101 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2102
2103 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2104
2105 start_sequence ();
2106
2107 /* Compute into RESULT.
2108 Set RESULT to wherever the result comes back. */
2109 result = expand_unop (mode, builtin_optab, op0, result, 0);
2110
2111 if (result != 0)
2112 {
2113 if (errno_set)
2114 expand_errno_check (exp, result);
2115
2116 /* Output the entire sequence. */
2117 insns = get_insns ();
2118 end_sequence ();
2119 emit_insn (insns);
2120 return result;
2121 }
2122
2123 /* If we were unable to expand via the builtin, stop the sequence
2124 (without outputting the insns) and call the library function
2125 with the stabilized argument list. */
2126 end_sequence ();
2127 }
2128
2129 return expand_call (exp, target, target == const0_rtx);
2130 }
2131
2132 /* Expand a call to the builtin binary math functions (pow and atan2).
2133 Return NULL_RTX if a normal call should be emitted rather than expanding the
2134 function in-line. EXP is the expression that is a call to the builtin
2135 function; if convenient, the result should be placed in TARGET.
2136 SUBTARGET may be used as the target for computing one of EXP's
2137 operands. */
2138
2139 static rtx
2140 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2141 {
2142 optab builtin_optab;
2143 rtx op0, op1, result;
2144 rtx_insn *insns;
2145 int op1_type = REAL_TYPE;
2146 tree fndecl = get_callee_fndecl (exp);
2147 tree arg0, arg1;
2148 machine_mode mode;
2149 bool errno_set = true;
2150
2151 switch (DECL_FUNCTION_CODE (fndecl))
2152 {
2153 CASE_FLT_FN (BUILT_IN_SCALBN):
2154 CASE_FLT_FN (BUILT_IN_SCALBLN):
2155 CASE_FLT_FN (BUILT_IN_LDEXP):
2156 op1_type = INTEGER_TYPE;
2157 default:
2158 break;
2159 }
2160
2161 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2162 return NULL_RTX;
2163
2164 arg0 = CALL_EXPR_ARG (exp, 0);
2165 arg1 = CALL_EXPR_ARG (exp, 1);
2166
2167 switch (DECL_FUNCTION_CODE (fndecl))
2168 {
2169 CASE_FLT_FN (BUILT_IN_POW):
2170 builtin_optab = pow_optab; break;
2171 CASE_FLT_FN (BUILT_IN_ATAN2):
2172 builtin_optab = atan2_optab; break;
2173 CASE_FLT_FN (BUILT_IN_SCALB):
2174 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2175 return 0;
2176 builtin_optab = scalb_optab; break;
2177 CASE_FLT_FN (BUILT_IN_SCALBN):
2178 CASE_FLT_FN (BUILT_IN_SCALBLN):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 /* Fall through... */
2182 CASE_FLT_FN (BUILT_IN_LDEXP):
2183 builtin_optab = ldexp_optab; break;
2184 CASE_FLT_FN (BUILT_IN_FMOD):
2185 builtin_optab = fmod_optab; break;
2186 CASE_FLT_FN (BUILT_IN_REMAINDER):
2187 CASE_FLT_FN (BUILT_IN_DREM):
2188 builtin_optab = remainder_optab; break;
2189 default:
2190 gcc_unreachable ();
2191 }
2192
2193 /* Make a suitable register to place result in. */
2194 mode = TYPE_MODE (TREE_TYPE (exp));
2195
2196 /* Before working hard, check whether the instruction is available. */
2197 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2198 return NULL_RTX;
2199
2200 result = gen_reg_rtx (mode);
2201
2202 if (! flag_errno_math || ! HONOR_NANS (mode))
2203 errno_set = false;
2204
2205 if (errno_set && optimize_insn_for_size_p ())
2206 return 0;
2207
2208 /* Always stabilize the argument list. */
2209 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2210 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2211
2212 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2213 op1 = expand_normal (arg1);
2214
2215 start_sequence ();
2216
2217 /* Compute into RESULT.
2218 Set RESULT to wherever the result comes back. */
2219 result = expand_binop (mode, builtin_optab, op0, op1,
2220 result, 0, OPTAB_DIRECT);
2221
2222 /* If we were unable to expand via the builtin, stop the sequence
2223 (without outputting the insns) and call the library function
2224 with the stabilized argument list. */
2225 if (result == 0)
2226 {
2227 end_sequence ();
2228 return expand_call (exp, target, target == const0_rtx);
2229 }
2230
2231 if (errno_set)
2232 expand_errno_check (exp, result);
2233
2234 /* Output the entire sequence. */
2235 insns = get_insns ();
2236 end_sequence ();
2237 emit_insn (insns);
2238
2239 return result;
2240 }
2241
2242 /* Expand a call to the builtin ternary math functions (fma).
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2245 function; if convenient, the result should be placed in TARGET.
2246 SUBTARGET may be used as the target for computing one of EXP's
2247 operands. */
2248
2249 static rtx
2250 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2251 {
2252 optab builtin_optab;
2253 rtx op0, op1, op2, result;
2254 rtx_insn *insns;
2255 tree fndecl = get_callee_fndecl (exp);
2256 tree arg0, arg1, arg2;
2257 machine_mode mode;
2258
2259 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2261
2262 arg0 = CALL_EXPR_ARG (exp, 0);
2263 arg1 = CALL_EXPR_ARG (exp, 1);
2264 arg2 = CALL_EXPR_ARG (exp, 2);
2265
2266 switch (DECL_FUNCTION_CODE (fndecl))
2267 {
2268 CASE_FLT_FN (BUILT_IN_FMA):
2269 builtin_optab = fma_optab; break;
2270 default:
2271 gcc_unreachable ();
2272 }
2273
2274 /* Make a suitable register to place result in. */
2275 mode = TYPE_MODE (TREE_TYPE (exp));
2276
2277 /* Before working hard, check whether the instruction is available. */
2278 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2279 return NULL_RTX;
2280
2281 result = gen_reg_rtx (mode);
2282
2283 /* Always stabilize the argument list. */
2284 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2285 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2286 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2287
2288 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2289 op1 = expand_normal (arg1);
2290 op2 = expand_normal (arg2);
2291
2292 start_sequence ();
2293
2294 /* Compute into RESULT.
2295 Set RESULT to wherever the result comes back. */
2296 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2297 result, 0);
2298
2299 /* If we were unable to expand via the builtin, stop the sequence
2300 (without outputting the insns) and call the library function
2301 with the stabilized argument list. */
2302 if (result == 0)
2303 {
2304 end_sequence ();
2305 return expand_call (exp, target, target == const0_rtx);
2306 }
2307
2308 /* Output the entire sequence. */
2309 insns = get_insns ();
2310 end_sequence ();
2311 emit_insn (insns);
2312
2313 return result;
2314 }
2315
2316 /* Expand a call to the builtin sin and cos math functions.
2317 Return NULL_RTX if a normal call should be emitted rather than expanding the
2318 function in-line. EXP is the expression that is a call to the builtin
2319 function; if convenient, the result should be placed in TARGET.
2320 SUBTARGET may be used as the target for computing one of EXP's
2321 operands. */
2322
2323 static rtx
2324 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2325 {
2326 optab builtin_optab;
2327 rtx op0;
2328 rtx_insn *insns;
2329 tree fndecl = get_callee_fndecl (exp);
2330 machine_mode mode;
2331 tree arg;
2332
2333 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2334 return NULL_RTX;
2335
2336 arg = CALL_EXPR_ARG (exp, 0);
2337
2338 switch (DECL_FUNCTION_CODE (fndecl))
2339 {
2340 CASE_FLT_FN (BUILT_IN_SIN):
2341 CASE_FLT_FN (BUILT_IN_COS):
2342 builtin_optab = sincos_optab; break;
2343 default:
2344 gcc_unreachable ();
2345 }
2346
2347 /* Make a suitable register to place result in. */
2348 mode = TYPE_MODE (TREE_TYPE (exp));
2349
2350 /* Check if the sincos insn is available, otherwise fall back
2351 to the sin or cos insn. */
2352 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 builtin_optab = sin_optab; break;
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 builtin_optab = cos_optab; break;
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 /* Before working hard, check whether the instruction is available. */
2364 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2365 {
2366 rtx result = gen_reg_rtx (mode);
2367
2368 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2369 need to expand the argument again. This way, we will not perform
2370 side-effects more than once. */
2371 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2372
2373 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2374
2375 start_sequence ();
2376
2377 /* Compute into RESULT.
2378 Set RESULT to wherever the result comes back. */
2379 if (builtin_optab == sincos_optab)
2380 {
2381 int ok;
2382
2383 switch (DECL_FUNCTION_CODE (fndecl))
2384 {
2385 CASE_FLT_FN (BUILT_IN_SIN):
2386 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2387 break;
2388 CASE_FLT_FN (BUILT_IN_COS):
2389 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2390 break;
2391 default:
2392 gcc_unreachable ();
2393 }
2394 gcc_assert (ok);
2395 }
2396 else
2397 result = expand_unop (mode, builtin_optab, op0, result, 0);
2398
2399 if (result != 0)
2400 {
2401 /* Output the entire sequence. */
2402 insns = get_insns ();
2403 end_sequence ();
2404 emit_insn (insns);
2405 return result;
2406 }
2407
2408 /* If we were unable to expand via the builtin, stop the sequence
2409 (without outputting the insns) and call the library function
2410 with the stabilized argument list. */
2411 end_sequence ();
2412 }
2413
2414 return expand_call (exp, target, target == const0_rtx);
2415 }
2416
2417 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2418 return an RTL instruction code that implements the functionality.
2419 If that isn't possible or available, return CODE_FOR_nothing. */
2420
2421 static enum insn_code
2422 interclass_mathfn_icode (tree arg, tree fndecl)
2423 {
2424 bool errno_set = false;
2425 optab builtin_optab = unknown_optab;
2426 machine_mode mode;
2427
2428 switch (DECL_FUNCTION_CODE (fndecl))
2429 {
2430 CASE_FLT_FN (BUILT_IN_ILOGB):
2431 errno_set = true; builtin_optab = ilogb_optab; break;
2432 CASE_FLT_FN (BUILT_IN_ISINF):
2433 builtin_optab = isinf_optab; break;
2434 case BUILT_IN_ISNORMAL:
2435 case BUILT_IN_ISFINITE:
2436 CASE_FLT_FN (BUILT_IN_FINITE):
2437 case BUILT_IN_FINITED32:
2438 case BUILT_IN_FINITED64:
2439 case BUILT_IN_FINITED128:
2440 case BUILT_IN_ISINFD32:
2441 case BUILT_IN_ISINFD64:
2442 case BUILT_IN_ISINFD128:
2443 /* These builtins have no optabs (yet). */
2444 break;
2445 default:
2446 gcc_unreachable ();
2447 }
2448
2449 /* There's no easy way to detect the case we need to set EDOM. */
2450 if (flag_errno_math && errno_set)
2451 return CODE_FOR_nothing;
2452
2453 /* Optab mode depends on the mode of the input argument. */
2454 mode = TYPE_MODE (TREE_TYPE (arg));
2455
2456 if (builtin_optab)
2457 return optab_handler (builtin_optab, mode);
2458 return CODE_FOR_nothing;
2459 }
2460
2461 /* Expand a call to one of the builtin math functions that operate on
2462 a floating point argument and output an integer result (ilogb, isinf,
2463 isnan, etc).
2464 Return 0 if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2466 function; if convenient, the result should be placed in TARGET. */
2467
2468 static rtx
2469 expand_builtin_interclass_mathfn (tree exp, rtx target)
2470 {
2471 enum insn_code icode = CODE_FOR_nothing;
2472 rtx op0;
2473 tree fndecl = get_callee_fndecl (exp);
2474 machine_mode mode;
2475 tree arg;
2476
2477 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2478 return NULL_RTX;
2479
2480 arg = CALL_EXPR_ARG (exp, 0);
2481 icode = interclass_mathfn_icode (arg, fndecl);
2482 mode = TYPE_MODE (TREE_TYPE (arg));
2483
2484 if (icode != CODE_FOR_nothing)
2485 {
2486 struct expand_operand ops[1];
2487 rtx_insn *last = get_last_insn ();
2488 tree orig_arg = arg;
2489
2490 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2491 need to expand the argument again. This way, we will not perform
2492 side-effects more than once. */
2493 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2494
2495 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2496
2497 if (mode != GET_MODE (op0))
2498 op0 = convert_to_mode (mode, op0, 0);
2499
2500 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2501 if (maybe_legitimize_operands (icode, 0, 1, ops)
2502 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2503 return ops[0].value;
2504
2505 delete_insns_since (last);
2506 CALL_EXPR_ARG (exp, 0) = orig_arg;
2507 }
2508
2509 return NULL_RTX;
2510 }
2511
2512 /* Expand a call to the builtin sincos math function.
2513 Return NULL_RTX if a normal call should be emitted rather than expanding the
2514 function in-line. EXP is the expression that is a call to the builtin
2515 function. */
2516
2517 static rtx
2518 expand_builtin_sincos (tree exp)
2519 {
2520 rtx op0, op1, op2, target1, target2;
2521 machine_mode mode;
2522 tree arg, sinp, cosp;
2523 int result;
2524 location_t loc = EXPR_LOCATION (exp);
2525 tree alias_type, alias_off;
2526
2527 if (!validate_arglist (exp, REAL_TYPE,
2528 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2529 return NULL_RTX;
2530
2531 arg = CALL_EXPR_ARG (exp, 0);
2532 sinp = CALL_EXPR_ARG (exp, 1);
2533 cosp = CALL_EXPR_ARG (exp, 2);
2534
2535 /* Make a suitable register to place result in. */
2536 mode = TYPE_MODE (TREE_TYPE (arg));
2537
2538 /* Check if sincos insn is available, otherwise emit the call. */
2539 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2540 return NULL_RTX;
2541
2542 target1 = gen_reg_rtx (mode);
2543 target2 = gen_reg_rtx (mode);
2544
2545 op0 = expand_normal (arg);
2546 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2547 alias_off = build_int_cst (alias_type, 0);
2548 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2549 sinp, alias_off));
2550 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 cosp, alias_off));
2552
2553 /* Compute into target1 and target2.
2554 Set TARGET to wherever the result comes back. */
2555 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2556 gcc_assert (result);
2557
2558 /* Move target1 and target2 to the memory locations indicated
2559 by op1 and op2. */
2560 emit_move_insn (op1, target1);
2561 emit_move_insn (op2, target2);
2562
2563 return const0_rtx;
2564 }
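/* The net effect of the above: for sincos (x, &s, &c) on a target with
   a sincos insn, both values are computed by a single twoval insn and
   then stored through the two pointer arguments; otherwise NULL_RTX is
   returned and the ordinary library call is emitted instead.  */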
2565
2566 /* Expand a call to the internal cexpi builtin to the sincos math function.
2567 EXP is the expression that is a call to the builtin function; if convenient,
2568 the result should be placed in TARGET. */
2569
2570 static rtx
2571 expand_builtin_cexpi (tree exp, rtx target)
2572 {
2573 tree fndecl = get_callee_fndecl (exp);
2574 tree arg, type;
2575 machine_mode mode;
2576 rtx op0, op1, op2;
2577 location_t loc = EXPR_LOCATION (exp);
2578
2579 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2580 return NULL_RTX;
2581
2582 arg = CALL_EXPR_ARG (exp, 0);
2583 type = TREE_TYPE (arg);
2584 mode = TYPE_MODE (TREE_TYPE (arg));
2585
2586 /* Try expanding via a sincos optab, fall back to emitting a libcall
2587 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2588 is only generated from sincos or cexp, or when either of them is available. */
2589 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2590 {
2591 op1 = gen_reg_rtx (mode);
2592 op2 = gen_reg_rtx (mode);
2593
2594 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2595
2596 /* Compute into op1 and op2. */
2597 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2598 }
2599 else if (targetm.libc_has_function (function_sincos))
2600 {
2601 tree call, fn = NULL_TREE;
2602 tree top1, top2;
2603 rtx op1a, op2a;
2604
2605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2606 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2611 else
2612 gcc_unreachable ();
2613
2614 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2615 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2616 op1a = copy_addr_to_reg (XEXP (op1, 0));
2617 op2a = copy_addr_to_reg (XEXP (op2, 0));
2618 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2619 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2620
2621 /* Make sure not to fold the sincos call again. */
2622 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2623 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2624 call, 3, arg, top1, top2));
2625 }
2626 else
2627 {
2628 tree call, fn = NULL_TREE, narg;
2629 tree ctype = build_complex_type (type);
2630
2631 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2632 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2637 else
2638 gcc_unreachable ();
2639
2640 /* If we don't have a decl for cexp, create one. This is the
2641 friendliest fallback if the user calls __builtin_cexpi
2642 without full C99 function support on the target. */
2643 if (fn == NULL_TREE)
2644 {
2645 tree fntype;
2646 const char *name = NULL;
2647
2648 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2649 name = "cexpf";
2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2651 name = "cexp";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2653 name = "cexpl";
2654
2655 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2656 fn = build_fn_decl (name, fntype);
2657 }
2658
2659 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2660 build_real (type, dconst0), arg);
2661
2662 /* Make sure not to fold the cexp call again. */
2663 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2664 return expand_expr (build_call_nary (ctype, call, 1, narg),
2665 target, VOIDmode, EXPAND_NORMAL);
2666 }
2667
2668 /* Now build the proper return type. */
2669 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2670 make_tree (TREE_TYPE (arg), op2),
2671 make_tree (TREE_TYPE (arg), op1)),
2672 target, VOIDmode, EXPAND_NORMAL);
2673 }
2674
2675 /* Conveniently construct a function call expression. FNDECL names the
2676 function to be called, N is the number of arguments, and the "..."
2677 parameters are the argument expressions. Unlike build_call_exr
2678 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2679
2680 static tree
2681 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2682 {
2683 va_list ap;
2684 tree fntype = TREE_TYPE (fndecl);
2685 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2686
2687 va_start (ap, n);
2688 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2689 va_end (ap);
2690 SET_EXPR_LOCATION (fn, loc);
2691 return fn;
2692 }
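/* For instance, build_call_nofold_loc (loc, fndecl, 2, arg0, arg1)
   produces the unfolded CALL_EXPR fndecl (arg0, arg1); the rounding
   fallbacks below rely on this to emit genuine library calls.  */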
2693
2694 /* Expand a call to one of the builtin rounding functions gcc defines
2695 as an extension (lfloor and lceil). As these are gcc extensions we
2696 do not need to worry about setting errno to EDOM.
2697 If expanding via optab fails, lower the expression to (int)(floor(x)).
2698 EXP is the expression that is a call to the builtin function;
2699 if convenient, the result should be placed in TARGET. */
2700
2701 static rtx
2702 expand_builtin_int_roundingfn (tree exp, rtx target)
2703 {
2704 convert_optab builtin_optab;
2705 rtx op0, tmp;
2706 rtx_insn *insns;
2707 tree fndecl = get_callee_fndecl (exp);
2708 enum built_in_function fallback_fn;
2709 tree fallback_fndecl;
2710 machine_mode mode;
2711 tree arg;
2712
2713 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2714 gcc_unreachable ();
2715
2716 arg = CALL_EXPR_ARG (exp, 0);
2717
2718 switch (DECL_FUNCTION_CODE (fndecl))
2719 {
2720 CASE_FLT_FN (BUILT_IN_ICEIL):
2721 CASE_FLT_FN (BUILT_IN_LCEIL):
2722 CASE_FLT_FN (BUILT_IN_LLCEIL):
2723 builtin_optab = lceil_optab;
2724 fallback_fn = BUILT_IN_CEIL;
2725 break;
2726
2727 CASE_FLT_FN (BUILT_IN_IFLOOR):
2728 CASE_FLT_FN (BUILT_IN_LFLOOR):
2729 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2730 builtin_optab = lfloor_optab;
2731 fallback_fn = BUILT_IN_FLOOR;
2732 break;
2733
2734 default:
2735 gcc_unreachable ();
2736 }
2737
2738 /* Make a suitable register to place result in. */
2739 mode = TYPE_MODE (TREE_TYPE (exp));
2740
2741 target = gen_reg_rtx (mode);
2742
2743 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2744 need to expand the argument again. This way, we will not perform
2745 side-effects more than once. */
2746 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2747
2748 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2749
2750 start_sequence ();
2751
2752 /* Compute into TARGET. */
2753 if (expand_sfix_optab (target, op0, builtin_optab))
2754 {
2755 /* Output the entire sequence. */
2756 insns = get_insns ();
2757 end_sequence ();
2758 emit_insn (insns);
2759 return target;
2760 }
2761
2762 /* If we were unable to expand via the builtin, stop the sequence
2763 (without outputting the insns). */
2764 end_sequence ();
2765
2766 /* Fall back to the floating point rounding function. */
2767 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2768
2769 /* For non-C99 targets we may end up without a fallback fndecl here
2770 if the user called __builtin_lfloor directly. In this case emit
2771 a call to the floor/ceil variants nevertheless. This should result
2772 in the best user experience for targets lacking full C99 support. */
2773 if (fallback_fndecl == NULL_TREE)
2774 {
2775 tree fntype;
2776 const char *name = NULL;
2777
2778 switch (DECL_FUNCTION_CODE (fndecl))
2779 {
2780 case BUILT_IN_ICEIL:
2781 case BUILT_IN_LCEIL:
2782 case BUILT_IN_LLCEIL:
2783 name = "ceil";
2784 break;
2785 case BUILT_IN_ICEILF:
2786 case BUILT_IN_LCEILF:
2787 case BUILT_IN_LLCEILF:
2788 name = "ceilf";
2789 break;
2790 case BUILT_IN_ICEILL:
2791 case BUILT_IN_LCEILL:
2792 case BUILT_IN_LLCEILL:
2793 name = "ceill";
2794 break;
2795 case BUILT_IN_IFLOOR:
2796 case BUILT_IN_LFLOOR:
2797 case BUILT_IN_LLFLOOR:
2798 name = "floor";
2799 break;
2800 case BUILT_IN_IFLOORF:
2801 case BUILT_IN_LFLOORF:
2802 case BUILT_IN_LLFLOORF:
2803 name = "floorf";
2804 break;
2805 case BUILT_IN_IFLOORL:
2806 case BUILT_IN_LFLOORL:
2807 case BUILT_IN_LLFLOORL:
2808 name = "floorl";
2809 break;
2810 default:
2811 gcc_unreachable ();
2812 }
2813
2814 fntype = build_function_type_list (TREE_TYPE (arg),
2815 TREE_TYPE (arg), NULL_TREE);
2816 fallback_fndecl = build_fn_decl (name, fntype);
2817 }
2818
2819 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2820
2821 tmp = expand_normal (exp);
2822 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2823
2824 /* Truncate the result of the floating point optab to integer
2825 via expand_fix (). */
2826 target = gen_reg_rtx (mode);
2827 expand_fix (target, tmp, 0);
2828
2829 return target;
2830 }
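/* In other words, on a target without an lceil insn a call such as
   __builtin_lceil (x) is expanded above as a library call to ceil
   (possibly via a locally built decl) followed by expand_fix, i.e.
   roughly (long) ceil (x).  */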
2831
2832 /* Expand a call to one of the builtin math functions doing integer
2833 conversion (lrint).
2834 Return 0 if a normal call should be emitted rather than expanding the
2835 function in-line. EXP is the expression that is a call to the builtin
2836 function; if convenient, the result should be placed in TARGET. */
2837
2838 static rtx
2839 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2840 {
2841 convert_optab builtin_optab;
2842 rtx op0;
2843 rtx_insn *insns;
2844 tree fndecl = get_callee_fndecl (exp);
2845 tree arg;
2846 machine_mode mode;
2847 enum built_in_function fallback_fn = BUILT_IN_NONE;
2848
2849 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2850 gcc_unreachable ();
2851
2852 arg = CALL_EXPR_ARG (exp, 0);
2853
2854 switch (DECL_FUNCTION_CODE (fndecl))
2855 {
2856 CASE_FLT_FN (BUILT_IN_IRINT):
2857 fallback_fn = BUILT_IN_LRINT;
2858 /* FALLTHRU */
2859 CASE_FLT_FN (BUILT_IN_LRINT):
2860 CASE_FLT_FN (BUILT_IN_LLRINT):
2861 builtin_optab = lrint_optab;
2862 break;
2863
2864 CASE_FLT_FN (BUILT_IN_IROUND):
2865 fallback_fn = BUILT_IN_LROUND;
2866 /* FALLTHRU */
2867 CASE_FLT_FN (BUILT_IN_LROUND):
2868 CASE_FLT_FN (BUILT_IN_LLROUND):
2869 builtin_optab = lround_optab;
2870 break;
2871
2872 default:
2873 gcc_unreachable ();
2874 }
2875
2876 /* There's no easy way to detect the case we need to set EDOM. */
2877 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2878 return NULL_RTX;
2879
2880 /* Make a suitable register to place result in. */
2881 mode = TYPE_MODE (TREE_TYPE (exp));
2882
2883 /* Expand inline only when errno need not be set; there's no easy way to detect the EDOM case. */
2884 if (!flag_errno_math)
2885 {
2886 rtx result = gen_reg_rtx (mode);
2887
2888 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2889 need to expand the argument again. This way, we will not perform
2890 side-effects more than once. */
2891 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2892
2893 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2894
2895 start_sequence ();
2896
2897 if (expand_sfix_optab (result, op0, builtin_optab))
2898 {
2899 /* Output the entire sequence. */
2900 insns = get_insns ();
2901 end_sequence ();
2902 emit_insn (insns);
2903 return result;
2904 }
2905
2906 /* If we were unable to expand via the builtin, stop the sequence
2907 (without outputting the insns) and call the library function
2908 with the stabilized argument list. */
2909 end_sequence ();
2910 }
2911
2912 if (fallback_fn != BUILT_IN_NONE)
2913 {
2914 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2915 targets, (int) round (x) should never be transformed into
2916 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2917 a call to lround in the hope that the target provides at least some
2918 C99 functions. This should result in the best user experience for
2919 targets lacking full C99 support. */
2920 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2921 fallback_fn, 0);
2922
2923 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2924 fallback_fndecl, 1, arg);
2925
2926 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2927 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2928 return convert_to_mode (mode, target, 0);
2929 }
2930
2931 return expand_call (exp, target, target == const0_rtx);
2932 }
2933
2934 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2935 a normal call should be emitted rather than expanding the function
2936 in-line. EXP is the expression that is a call to the builtin
2937 function; if convenient, the result should be placed in TARGET. */
2938
2939 static rtx
2940 expand_builtin_powi (tree exp, rtx target)
2941 {
2942 tree arg0, arg1;
2943 rtx op0, op1;
2944 machine_mode mode;
2945 machine_mode mode2;
2946
2947 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2948 return NULL_RTX;
2949
2950 arg0 = CALL_EXPR_ARG (exp, 0);
2951 arg1 = CALL_EXPR_ARG (exp, 1);
2952 mode = TYPE_MODE (TREE_TYPE (exp));
2953
2954 /* Emit a libcall to libgcc. */
2955
2956 /* Mode of the 2nd argument must match that of an int. */
2957 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2958
2959 if (target == NULL_RTX)
2960 target = gen_reg_rtx (mode);
2961
2962 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2963 if (GET_MODE (op0) != mode)
2964 op0 = convert_to_mode (mode, op0, 0);
2965 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2966 if (GET_MODE (op1) != mode2)
2967 op1 = convert_to_mode (mode2, op1, 0);
2968
2969 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2970 target, LCT_CONST, mode, 2,
2971 op0, mode, op1, mode2);
2972
2973 return target;
2974 }
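/* For example, __builtin_powi (x, n) with double X becomes a call to
   the DFmode powi libfunc here (conventionally __powidf2 in libgcc),
   with N first converted to the mode of an int.  */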
2975
2976 /* Expand expression EXP which is a call to the strlen builtin. Return
2977 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2978 try to get the result in TARGET, if convenient. */
2979
2980 static rtx
2981 expand_builtin_strlen (tree exp, rtx target,
2982 machine_mode target_mode)
2983 {
2984 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2985 return NULL_RTX;
2986 else
2987 {
2988 struct expand_operand ops[4];
2989 rtx pat;
2990 tree len;
2991 tree src = CALL_EXPR_ARG (exp, 0);
2992 rtx src_reg;
2993 rtx_insn *before_strlen;
2994 machine_mode insn_mode = target_mode;
2995 enum insn_code icode = CODE_FOR_nothing;
2996 unsigned int align;
2997
2998 /* If the length can be computed at compile-time, return it. */
2999 len = c_strlen (src, 0);
3000 if (len)
3001 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3002
3003 /* If the length can be computed at compile-time and is a constant
3004 integer, but there are side-effects in src, evaluate
3005 src for side-effects, then return len.
3006 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3007 can be optimized into: i++; x = 3; */
3008 len = c_strlen (src, 1);
3009 if (len && TREE_CODE (len) == INTEGER_CST)
3010 {
3011 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3012 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013 }
3014
3015 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3016
3017 /* If SRC is not a pointer type, don't do this operation inline. */
3018 if (align == 0)
3019 return NULL_RTX;
3020
3021 /* Bail out if we can't compute strlen in the right mode. */
3022 while (insn_mode != VOIDmode)
3023 {
3024 icode = optab_handler (strlen_optab, insn_mode);
3025 if (icode != CODE_FOR_nothing)
3026 break;
3027
3028 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3029 }
3030 if (insn_mode == VOIDmode)
3031 return NULL_RTX;
3032
3033 /* Make a place to hold the source address. We will not expand
3034 the actual source until we are sure that the expansion will
3035 not fail -- there are trees that cannot be expanded twice. */
3036 src_reg = gen_reg_rtx (Pmode);
3037
3038 /* Mark the beginning of the strlen sequence so we can emit the
3039 source operand later. */
3040 before_strlen = get_last_insn ();
3041
3042 create_output_operand (&ops[0], target, insn_mode);
3043 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3044 create_integer_operand (&ops[2], 0);
3045 create_integer_operand (&ops[3], align);
3046 if (!maybe_expand_insn (icode, 4, ops))
3047 return NULL_RTX;
3048
3049 /* Now that we are assured of success, expand the source. */
3050 start_sequence ();
3051 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3052 if (pat != src_reg)
3053 {
3054 #ifdef POINTERS_EXTEND_UNSIGNED
3055 if (GET_MODE (pat) != Pmode)
3056 pat = convert_to_mode (Pmode, pat,
3057 POINTERS_EXTEND_UNSIGNED);
3058 #endif
3059 emit_move_insn (src_reg, pat);
3060 }
3061 pat = get_insns ();
3062 end_sequence ();
3063
3064 if (before_strlen)
3065 emit_insn_after (pat, before_strlen);
3066 else
3067 emit_insn_before (pat, get_insns ());
3068
3069 /* Return the value in the proper mode for this function. */
3070 if (GET_MODE (ops[0].value) == target_mode)
3071 target = ops[0].value;
3072 else if (target != 0)
3073 convert_move (target, ops[0].value, 0);
3074 else
3075 target = convert_to_mode (target_mode, ops[0].value, 0);
3076
3077 return target;
3078 }
3079 }
3080
3081 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3082 bytes from the constant string DATA + OFFSET and return it as a target
3083 constant. */
3084
3085 static rtx
3086 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3087 machine_mode mode)
3088 {
3089 const char *str = (const char *) data;
3090
3091 gcc_assert (offset >= 0
3092 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3093 <= strlen (str) + 1));
3094
3095 return c_readstr (str + offset, mode);
3096 }
3097
3098 /* LEN specifies the length of the block for a memcpy/memset operation.
3099 Figure out its range and store it into MIN_SIZE/MAX_SIZE.
3100 In some cases we can make a very likely guess at the max size, which
3101 we then store into PROBABLE_MAX_SIZE. */
3102
3103 static void
3104 determine_block_size (tree len, rtx len_rtx,
3105 unsigned HOST_WIDE_INT *min_size,
3106 unsigned HOST_WIDE_INT *max_size,
3107 unsigned HOST_WIDE_INT *probable_max_size)
3108 {
3109 if (CONST_INT_P (len_rtx))
3110 {
3111 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3112 return;
3113 }
3114 else
3115 {
3116 wide_int min, max;
3117 enum value_range_type range_type = VR_UNDEFINED;
3118
3119 /* Determine bounds from the type. */
3120 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3121 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3122 else
3123 *min_size = 0;
3124 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3125 *probable_max_size = *max_size
3126 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3127 else
3128 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3129
3130 if (TREE_CODE (len) == SSA_NAME)
3131 range_type = get_range_info (len, &min, &max);
3132 if (range_type == VR_RANGE)
3133 {
3134 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3135 *min_size = min.to_uhwi ();
3136 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3137 *probable_max_size = *max_size = max.to_uhwi ();
3138 }
3139 else if (range_type == VR_ANTI_RANGE)
3140 {
3141 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3142 if (min == 0)
3143 {
3144 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3145 *min_size = max.to_uhwi () + 1;
3146 }
3147 /* Code like
3148
3149 int n;
3150 if (n < 100)
3151 memcpy (a, b, n)
3152
3153 produces an anti range allowing negative values of N. We can
3154 still use this information and guess that N is not negative.
3155 */
3156 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3157 *probable_max_size = min.to_uhwi () - 1;
3158 }
3159 }
3160 gcc_checking_assert (*max_size <=
3161 (unsigned HOST_WIDE_INT)
3162 GET_MODE_MASK (GET_MODE (len_rtx)));
3163 }
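/* For instance, if LEN is an SSA name with a computed value range of
   [16, 4096], the caller learns *min_size == 16 and *max_size ==
   *probable_max_size == 4096; without range information only the
   bounds implied by LEN's type and mode are available.  */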
3164
3165 /* Helper function to do the actual work for expand_builtin_memcpy. */
3166
3167 static rtx
3168 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3169 {
3170 const char *src_str;
3171 unsigned int src_align = get_pointer_alignment (src);
3172 unsigned int dest_align = get_pointer_alignment (dest);
3173 rtx dest_mem, src_mem, dest_addr, len_rtx;
3174 HOST_WIDE_INT expected_size = -1;
3175 unsigned int expected_align = 0;
3176 unsigned HOST_WIDE_INT min_size;
3177 unsigned HOST_WIDE_INT max_size;
3178 unsigned HOST_WIDE_INT probable_max_size;
3179
3180 /* If DEST is not a pointer type, call the normal function. */
3181 if (dest_align == 0)
3182 return NULL_RTX;
3183
3184 /* If SRC is not a pointer type, don't do this
3185 operation in-line. */
3186 if (src_align == 0)
3187 return NULL_RTX;
3188
3189 if (currently_expanding_gimple_stmt)
3190 stringop_block_profile (currently_expanding_gimple_stmt,
3191 &expected_align, &expected_size);
3192
3193 if (expected_align < dest_align)
3194 expected_align = dest_align;
3195 dest_mem = get_memory_rtx (dest, len);
3196 set_mem_align (dest_mem, dest_align);
3197 len_rtx = expand_normal (len);
3198 determine_block_size (len, len_rtx, &min_size, &max_size,
3199 &probable_max_size);
3200 src_str = c_getstr (src);
3201
3202 /* If SRC is a string constant and block move would be done
3203 by pieces, we can avoid loading the string from memory
3204 and only store the computed constants. */
3205 if (src_str
3206 && CONST_INT_P (len_rtx)
3207 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3208 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3209 CONST_CAST (char *, src_str),
3210 dest_align, false))
3211 {
3212 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3213 builtin_memcpy_read_str,
3214 CONST_CAST (char *, src_str),
3215 dest_align, false, 0);
3216 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3217 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3218 return dest_mem;
3219 }
3220
3221 src_mem = get_memory_rtx (src, len);
3222 set_mem_align (src_mem, src_align);
3223
3224 /* Copy word part most expediently. */
3225 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3226 CALL_EXPR_TAILCALL (exp)
3227 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3228 expected_align, expected_size,
3229 min_size, max_size, probable_max_size);
3230
3231 if (dest_addr == 0)
3232 {
3233 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3234 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3235 }
3236
3237 return dest_addr;
3238 }
3239
3240 /* Expand a call EXP to the memcpy builtin.
3241 Return NULL_RTX if we failed; the caller should emit a normal call,
3242 otherwise try to get the result in TARGET, if convenient (and in
3243 mode MODE if that's convenient). */
3244
3245 static rtx
3246 expand_builtin_memcpy (tree exp, rtx target)
3247 {
3248 if (!validate_arglist (exp,
3249 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3250 return NULL_RTX;
3251 else
3252 {
3253 tree dest = CALL_EXPR_ARG (exp, 0);
3254 tree src = CALL_EXPR_ARG (exp, 1);
3255 tree len = CALL_EXPR_ARG (exp, 2);
3256 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3257 }
3258 }
3259
3260 /* Expand an instrumented call EXP to the memcpy builtin.
3261 Return NULL_RTX if we failed; the caller should emit a normal call,
3262 otherwise try to get the result in TARGET, if convenient (and in
3263 mode MODE if that's convenient). */
3264
3265 static rtx
3266 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3267 {
3268 if (!validate_arglist (exp,
3269 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3270 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3271 INTEGER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273 else
3274 {
3275 tree dest = CALL_EXPR_ARG (exp, 0);
3276 tree src = CALL_EXPR_ARG (exp, 2);
3277 tree len = CALL_EXPR_ARG (exp, 4);
3278 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3279
3280 /* Return src bounds with the result. */
3281 if (res)
3282 {
3283 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3284 expand_normal (CALL_EXPR_ARG (exp, 1)));
3285 res = chkp_join_splitted_slot (res, bnd);
3286 }
3287 return res;
3288 }
3289 }
3290
3291 /* Expand a call EXP to the mempcpy builtin.
3292 Return NULL_RTX if we failed; the caller should emit a normal call,
3293 otherwise try to get the result in TARGET, if convenient (and in
3294 mode MODE if that's convenient). If ENDP is 0 return the
3295 destination pointer, if ENDP is 1 return the end pointer ala
3296 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3297 stpcpy. */
3298
3299 static rtx
3300 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3301 {
3302 if (!validate_arglist (exp,
3303 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3304 return NULL_RTX;
3305 else
3306 {
3307 tree dest = CALL_EXPR_ARG (exp, 0);
3308 tree src = CALL_EXPR_ARG (exp, 1);
3309 tree len = CALL_EXPR_ARG (exp, 2);
3310 return expand_builtin_mempcpy_args (dest, src, len,
3311 target, mode, /*endp=*/ 1,
3312 exp);
3313 }
3314 }
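/* Reading ENDP concretely for a copy of N bytes into DEST: ENDP == 0
   yields DEST, ENDP == 1 yields DEST + N (the mempcpy convention), and
   ENDP == 2 yields DEST + N - 1 (the stpcpy convention, where the
   result points at the copied NUL).  */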
3315
3316 /* Expand an instrumented call EXP to the mempcpy builtin.
3317 Return NULL_RTX if we failed; the caller should emit a normal call,
3318 otherwise try to get the result in TARGET, if convenient (and in
3319 mode MODE if that's convenient). */
3320
3321 static rtx
3322 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3323 {
3324 if (!validate_arglist (exp,
3325 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3326 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3327 INTEGER_TYPE, VOID_TYPE))
3328 return NULL_RTX;
3329 else
3330 {
3331 tree dest = CALL_EXPR_ARG (exp, 0);
3332 tree src = CALL_EXPR_ARG (exp, 2);
3333 tree len = CALL_EXPR_ARG (exp, 4);
3334 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3335 mode, 1, exp);
3336
3337       /* Return DEST bounds with the result.  */
3338 if (res)
3339 {
3340 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3341 expand_normal (CALL_EXPR_ARG (exp, 1)));
3342 res = chkp_join_splitted_slot (res, bnd);
3343 }
3344 return res;
3345 }
3346 }
3347
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 The other arguments and return value are the same as for
3352 expand_builtin_mempcpy. */
3353
3354 static rtx
3355 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3356 rtx target, machine_mode mode, int endp,
3357 tree orig_exp)
3358 {
3359 tree fndecl = get_callee_fndecl (orig_exp);
3360
3361 /* If return value is ignored, transform mempcpy into memcpy. */
3362 if (target == const0_rtx
3363 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3364 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3365 {
3366 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3367 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3368 dest, src, len);
3369 return expand_expr (result, target, mode, EXPAND_NORMAL);
3370 }
3371 else if (target == const0_rtx
3372 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3373 {
3374 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3375 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3376 dest, src, len);
3377 return expand_expr (result, target, mode, EXPAND_NORMAL);
3378 }
3379 else
3380 {
3381 const char *src_str;
3382 unsigned int src_align = get_pointer_alignment (src);
3383 unsigned int dest_align = get_pointer_alignment (dest);
3384 rtx dest_mem, src_mem, len_rtx;
3385
3386 /* If either SRC or DEST is not a pointer type, don't do this
3387 operation in-line. */
3388 if (dest_align == 0 || src_align == 0)
3389 return NULL_RTX;
3390
3391 /* If LEN is not constant, call the normal function. */
3392 if (! tree_fits_uhwi_p (len))
3393 return NULL_RTX;
3394
3395 len_rtx = expand_normal (len);
3396 src_str = c_getstr (src);
3397
3398       /* If SRC is a string constant and the block move would be done
3399	  by pieces, we can avoid loading the string from memory
3400	  and need only store the computed constants.  */
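      /* E.g. mempcpy (dst, "ab", 3), with DST suitably aligned, is
	 emitted as immediate stores of the bytes 'a', 'b' and '\0'
	 rather than a load-and-copy of the string constant (a sketch;
	 DST is a hypothetical destination pointer).  */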
3401 if (src_str
3402 && CONST_INT_P (len_rtx)
3403 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3404 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3405 CONST_CAST (char *, src_str),
3406 dest_align, false))
3407 {
3408 dest_mem = get_memory_rtx (dest, len);
3409 set_mem_align (dest_mem, dest_align);
3410 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3411 builtin_memcpy_read_str,
3412 CONST_CAST (char *, src_str),
3413 dest_align, false, endp);
3414 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3415 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3416 return dest_mem;
3417 }
3418
3419 if (CONST_INT_P (len_rtx)
3420 && can_move_by_pieces (INTVAL (len_rtx),
3421 MIN (dest_align, src_align)))
3422 {
3423 dest_mem = get_memory_rtx (dest, len);
3424 set_mem_align (dest_mem, dest_align);
3425 src_mem = get_memory_rtx (src, len);
3426 set_mem_align (src_mem, src_align);
3427 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3428 MIN (dest_align, src_align), endp);
3429 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3430 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3431 return dest_mem;
3432 }
3433
3434 return NULL_RTX;
3435 }
3436 }
3437
3438 /* Expand into a movstr instruction, if one is available.  Return
3439    NULL_RTX if we failed; the caller should then emit a normal call.
3440    Otherwise try to get the result in TARGET, if convenient.  If ENDP
3441    is 0 return the destination pointer, if ENDP is 1 return the end
3442    pointer a la mempcpy, and if ENDP is 2 return the end pointer
3443    minus one a la stpcpy.  */
3444
3445 static rtx
3446 expand_movstr (tree dest, tree src, rtx target, int endp)
3447 {
3448 struct expand_operand ops[3];
3449 rtx dest_mem;
3450 rtx src_mem;
3451
3452 if (!targetm.have_movstr ())
3453 return NULL_RTX;
3454
3455 dest_mem = get_memory_rtx (dest, NULL);
3456 src_mem = get_memory_rtx (src, NULL);
3457 if (!endp)
3458 {
3459 target = force_reg (Pmode, XEXP (dest_mem, 0));
3460 dest_mem = replace_equiv_address (dest_mem, target);
3461 }
3462
3463 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3464 create_fixed_operand (&ops[1], dest_mem);
3465 create_fixed_operand (&ops[2], src_mem);
3466 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3467 return NULL_RTX;
3468
3469 if (endp && target != const0_rtx)
3470 {
3471 target = ops[0].value;
3472 /* movstr is supposed to set end to the address of the NUL
3473 terminator. If the caller requested a mempcpy-like return value,
3474 adjust it. */
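	  /* E.g. after copying "abc" to DEST, movstr leaves the end
	     pointer at DEST + 3, the NUL; mempcpy semantics require
	     DEST + 4, hence the increment below.  */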
3475 if (endp == 1)
3476 {
3477 rtx tem = plus_constant (GET_MODE (target),
3478 gen_lowpart (GET_MODE (target), target), 1);
3479 emit_move_insn (target, force_operand (tem, NULL_RTX));
3480 }
3481 }
3482 return target;
3483 }
3484
3485 /* Expand expression EXP, which is a call to the strcpy builtin.
3486    Return NULL_RTX if we failed; the caller should then emit a
3487    normal call.  Otherwise try to get the result in TARGET, if
3488    convenient.  */
3489
3490 static rtx
3491 expand_builtin_strcpy (tree exp, rtx target)
3492 {
3493 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3494 {
3495 tree dest = CALL_EXPR_ARG (exp, 0);
3496 tree src = CALL_EXPR_ARG (exp, 1);
3497 return expand_builtin_strcpy_args (dest, src, target);
3498 }
3499 return NULL_RTX;
3500 }
3501
3502 /* Helper function to do the actual work for expand_builtin_strcpy. The
3503 arguments to the builtin_strcpy call DEST and SRC are broken out
3504 so that this can also be called without constructing an actual CALL_EXPR.
3505 The other arguments and return value are the same as for
3506 expand_builtin_strcpy. */
3507
3508 static rtx
3509 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3510 {
3511 return expand_movstr (dest, src, target, /*endp=*/0);
3512 }
3513
3514 /* Expand a call EXP to the stpcpy builtin.
3515    Return NULL_RTX if we failed; the caller should then emit a normal
3516    call.  Otherwise try to get the result in TARGET, if convenient
3517    (and in mode MODE if that's convenient).  */
3518
3519 static rtx
3520 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3521 {
3522 tree dst, src;
3523 location_t loc = EXPR_LOCATION (exp);
3524
3525 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3527
3528 dst = CALL_EXPR_ARG (exp, 0);
3529 src = CALL_EXPR_ARG (exp, 1);
3530
3531 /* If return value is ignored, transform stpcpy into strcpy. */
3532 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3533 {
3534 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3535 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3536 return expand_expr (result, target, mode, EXPAND_NORMAL);
3537 }
3538 else
3539 {
3540 tree len, lenp1;
3541 rtx ret;
3542
3543 /* Ensure we get an actual string whose length can be evaluated at
3544 compile-time, not an expression containing a string. This is
3545 because the latter will potentially produce pessimized code
3546 when used to produce the return value. */
3547 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3548 return expand_movstr (dst, src, target, /*endp=*/2);
3549
3550 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3551 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3552 target, mode, /*endp=*/2,
3553 exp);
3554
3555 if (ret)
3556 return ret;
3557
3558 if (TREE_CODE (len) == INTEGER_CST)
3559 {
3560 rtx len_rtx = expand_normal (len);
3561
3562 if (CONST_INT_P (len_rtx))
3563 {
3564 ret = expand_builtin_strcpy_args (dst, src, target);
3565
3566 if (ret)
3567 {
3568 if (! target)
3569 {
3570 if (mode != VOIDmode)
3571 target = gen_reg_rtx (mode);
3572 else
3573 target = gen_reg_rtx (GET_MODE (ret));
3574 }
3575 if (GET_MODE (target) != GET_MODE (ret))
3576 ret = gen_lowpart (GET_MODE (target), ret);
3577
3578 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3579 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3580 gcc_assert (ret);
3581
3582 return target;
3583 }
3584 }
3585 }
3586
3587 return expand_movstr (dst, src, target, /*endp=*/2);
3588 }
3589 }
3590
3591 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3592    bytes from the constant string DATA + OFFSET and return them as a
3593    target constant.  */
3594
3595 rtx
3596 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3597 machine_mode mode)
3598 {
3599 const char *str = (const char *) data;
3600
3601 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3602 return const0_rtx;
3603
3604 return c_readstr (str + offset, mode);
3605 }
3606
3607 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3608    NULL_RTX if we failed; the caller should then emit a normal call.  */
3609
3610 static rtx
3611 expand_builtin_strncpy (tree exp, rtx target)
3612 {
3613 location_t loc = EXPR_LOCATION (exp);
3614
3615 if (validate_arglist (exp,
3616 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3617 {
3618 tree dest = CALL_EXPR_ARG (exp, 0);
3619 tree src = CALL_EXPR_ARG (exp, 1);
3620 tree len = CALL_EXPR_ARG (exp, 2);
3621 tree slen = c_strlen (src, 1);
3622
3623       /* LEN must be a known constant and SRC must have a computable constant length.  */
3624 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3625 return NULL_RTX;
3626
3627 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3628
3629       /* We're required to pad with trailing zeros if the requested
3630	  len is greater than strlen(s2)+1.  In that case try to
3631	  use store_by_pieces; if that fails, punt.  */
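      /* E.g. strncpy (buf, "ab", 8) must store "ab" plus six zero
	 bytes; builtin_strncpy_read_str returns const0_rtx for offsets
	 past the string, so store_by_pieces emits the padding as well
	 (a sketch; BUF is a hypothetical destination buffer).  */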
3632 if (tree_int_cst_lt (slen, len))
3633 {
3634 unsigned int dest_align = get_pointer_alignment (dest);
3635 const char *p = c_getstr (src);
3636 rtx dest_mem;
3637
3638 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3639 || !can_store_by_pieces (tree_to_uhwi (len),
3640 builtin_strncpy_read_str,
3641 CONST_CAST (char *, p),
3642 dest_align, false))
3643 return NULL_RTX;
3644
3645 dest_mem = get_memory_rtx (dest, len);
3646 store_by_pieces (dest_mem, tree_to_uhwi (len),
3647 builtin_strncpy_read_str,
3648 CONST_CAST (char *, p), dest_align, false, 0);
3649 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3650 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3651 return dest_mem;
3652 }
3653 }
3654 return NULL_RTX;
3655 }
3656
3657 /* Callback routine for store_by_pieces.  Return a target constant
3658    made up of GET_MODE_SIZE (MODE) copies of the single byte that
3659    DATA points to; OFFSET is ignored.  */
3660
3661 rtx
3662 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3663 machine_mode mode)
3664 {
3665 const char *c = (const char *) data;
3666 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3667
3668 memset (p, *c, GET_MODE_SIZE (mode));
3669
3670 return c_readstr (p, mode);
3671 }
3672
3673 /* Callback routine for store_by_pieces. Return the RTL of a register
3674 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3675 char value given in the RTL register data. For example, if mode is
3676 4 bytes wide, return the RTL for 0x01010101*data. */
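/* E.g. with DATA holding 0xab and a 4-byte MODE, the product is
   0xab * 0x01010101 == 0xabababab, the memset fill pattern for
   four bytes.  */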
3677
3678 static rtx
3679 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3680 machine_mode mode)
3681 {
3682 rtx target, coeff;
3683 size_t size;
3684 char *p;
3685
3686 size = GET_MODE_SIZE (mode);
3687 if (size == 1)
3688 return (rtx) data;
3689
3690 p = XALLOCAVEC (char, size);
3691 memset (p, 1, size);
3692 coeff = c_readstr (p, mode);
3693
3694 target = convert_to_mode (mode, (rtx) data, 1);
3695 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3696 return force_reg (mode, target);
3697 }
3698
3699 /* Expand expression EXP, which is a call to the memset builtin.
3700    Return NULL_RTX if we failed; the caller should then emit a normal
3701    call.  Otherwise try to get the result in TARGET, if convenient
3702    (and in mode MODE if that's convenient).  */
3703
3704 static rtx
3705 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3706 {
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3709 return NULL_RTX;
3710 else
3711 {
3712 tree dest = CALL_EXPR_ARG (exp, 0);
3713 tree val = CALL_EXPR_ARG (exp, 1);
3714 tree len = CALL_EXPR_ARG (exp, 2);
3715 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3716 }
3717 }
3718
3719 /* Expand expression EXP, which is an instrumented call to the memset
3720    builtin.  Return NULL_RTX if we failed; the caller should then emit
3721    a normal call.  Otherwise try to get the result in TARGET, if
3722    convenient (and in mode MODE if that's convenient).  */
3723
3724 static rtx
3725 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3726 {
3727 if (!validate_arglist (exp,
3728 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3729 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3730 return NULL_RTX;
3731 else
3732 {
3733 tree dest = CALL_EXPR_ARG (exp, 0);
3734 tree val = CALL_EXPR_ARG (exp, 2);
3735 tree len = CALL_EXPR_ARG (exp, 3);
3736 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3737
3738       /* Return DEST bounds with the result.  */
3739 if (res)
3740 {
3741 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3742 expand_normal (CALL_EXPR_ARG (exp, 1)));
3743 res = chkp_join_splitted_slot (res, bnd);
3744 }
3745 return res;
3746 }
3747 }
3748
3749 /* Helper function to do the actual work for expand_builtin_memset. The
3750 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3751 so that this can also be called without constructing an actual CALL_EXPR.
3752 The other arguments and return value are the same as for
3753 expand_builtin_memset. */
3754
3755 static rtx
3756 expand_builtin_memset_args (tree dest, tree val, tree len,
3757 rtx target, machine_mode mode, tree orig_exp)
3758 {
3759 tree fndecl, fn;
3760 enum built_in_function fcode;
3761 machine_mode val_mode;
3762 char c;
3763 unsigned int dest_align;
3764 rtx dest_mem, dest_addr, len_rtx;
3765 HOST_WIDE_INT expected_size = -1;
3766 unsigned int expected_align = 0;
3767 unsigned HOST_WIDE_INT min_size;
3768 unsigned HOST_WIDE_INT max_size;
3769 unsigned HOST_WIDE_INT probable_max_size;
3770
3771 dest_align = get_pointer_alignment (dest);
3772
3773 /* If DEST is not a pointer type, don't do this operation in-line. */
3774 if (dest_align == 0)
3775 return NULL_RTX;
3776
3777 if (currently_expanding_gimple_stmt)
3778 stringop_block_profile (currently_expanding_gimple_stmt,
3779 &expected_align, &expected_size);
3780
3781 if (expected_align < dest_align)
3782 expected_align = dest_align;
3783
3784 /* If the LEN parameter is zero, return DEST. */
3785 if (integer_zerop (len))
3786 {
3787 /* Evaluate and ignore VAL in case it has side-effects. */
3788 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3789 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3790 }
3791
3792 /* Stabilize the arguments in case we fail. */
3793 dest = builtin_save_expr (dest);
3794 val = builtin_save_expr (val);
3795 len = builtin_save_expr (len);
3796
3797 len_rtx = expand_normal (len);
3798 determine_block_size (len, len_rtx, &min_size, &max_size,
3799 &probable_max_size);
3800 dest_mem = get_memory_rtx (dest, len);
3801 val_mode = TYPE_MODE (unsigned_char_type_node);
3802
3803 if (TREE_CODE (val) != INTEGER_CST)
3804 {
3805 rtx val_rtx;
3806
3807 val_rtx = expand_normal (val);
3808 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3809
3810       /* Assume that we can memset by pieces if we can store
3811	  the coefficients by pieces (in the required modes).
3812	  We can't pass builtin_memset_gen_str as that emits RTL.  */
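      /* I.e. probe can_store_by_pieces with a dummy constant byte
	 (C = 1); if storing that by pieces is possible, the replicated
	 VAL_RTX pattern produced by builtin_memset_gen_str can be
	 stored the same way.  */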
3813 c = 1;
3814 if (tree_fits_uhwi_p (len)
3815 && can_store_by_pieces (tree_to_uhwi (len),
3816 builtin_memset_read_str, &c, dest_align,
3817 true))
3818 {
3819 val_rtx = force_reg (val_mode, val_rtx);
3820 store_by_pieces (dest_mem, tree_to_uhwi (len),
3821 builtin_memset_gen_str, val_rtx, dest_align,
3822 true, 0);
3823 }
3824 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3825 dest_align, expected_align,
3826 expected_size, min_size, max_size,
3827 probable_max_size))
3828 goto do_libcall;
3829
3830 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3831 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3832 return dest_mem;
3833 }
3834
3835 if (target_char_cast (val, &c))
3836 goto do_libcall;
3837
3838 if (c)
3839 {
3840 if (tree_fits_uhwi_p (len)
3841 && can_store_by_pieces (tree_to_uhwi (len),
3842 builtin_memset_read_str, &c, dest_align,
3843 true))
3844 store_by_pieces (dest_mem, tree_to_uhwi (len),
3845 builtin_memset_read_str, &c, dest_align, true, 0);
3846 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3847 gen_int_mode (c, val_mode),
3848 dest_align, expected_align,
3849 expected_size, min_size, max_size,
3850 probable_max_size))
3851 goto do_libcall;
3852
3853 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3854 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3855 return dest_mem;
3856 }
3857
3858 set_mem_align (dest_mem, dest_align);
3859 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3860 CALL_EXPR_TAILCALL (orig_exp)
3861 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3862 expected_align, expected_size,
3863 min_size, max_size,
3864 probable_max_size);
3865
3866 if (dest_addr == 0)
3867 {
3868 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3869 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3870 }
3871
3872 return dest_addr;
3873
3874 do_libcall:
3875 fndecl = get_callee_fndecl (orig_exp);
3876 fcode = DECL_FUNCTION_CODE (fndecl);
3877 if (fcode == BUILT_IN_MEMSET
3878 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3879 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3880 dest, val, len);
3881 else if (fcode == BUILT_IN_BZERO)
3882 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3883 dest, len);
3884 else
3885 gcc_unreachable ();
3886 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3887 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3888 return expand_call (fn, target, target == const0_rtx);
3889 }
3890
3891 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3892    NULL_RTX if we failed; the caller should then emit a normal call.  */
3893
3894 static rtx
3895 expand_builtin_bzero (tree exp)
3896 {
3897 tree dest, size;
3898 location_t loc = EXPR_LOCATION (exp);
3899
3900 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3901 return NULL_RTX;
3902
3903 dest = CALL_EXPR_ARG (exp, 0);
3904 size = CALL_EXPR_ARG (exp, 1);
3905
3906   /* Build a new argument list transforming bzero (ptr x, int y) into
3907      memset (ptr x, int 0, size_t y).  It is done this way so that if
3908      the expansion is not done inline, we fall back to calling bzero
3909      rather than memset.  */
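  /* E.g. bzero (buf, n) expands as memset (buf, 0, (size_t) n) would,
     but ORIG_EXP remains the bzero call, so the do_libcall path in
     expand_builtin_memset_args still emits a call to bzero.  */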
3910
3911 return expand_builtin_memset_args (dest, integer_zero_node,
3912 fold_convert_loc (loc,
3913 size_type_node, size),
3914 const0_rtx, VOIDmode, exp);
3915 }
3916
3917 /* Try to expand cmpstr operation ICODE with the given operands.
3918 Return the result rtx on success, otherwise return null. */
3919
3920 static rtx
3921 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3922 HOST_WIDE_INT align)
3923 {
3924 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3925
3926 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3927 target = NULL_RTX;
3928
3929 struct expand_operand ops[4];
3930 create_output_operand (&ops[0], target, insn_mode);
3931 create_fixed_operand (&ops[1], arg1_rtx);
3932 create_fixed_operand (&ops[2], arg2_rtx);
3933 create_integer_operand (&ops[3], align);
3934 if (maybe_expand_insn (icode, 4, ops))
3935 return ops[0].value;
3936 return NULL_RTX;
3937 }
3938
3939 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3940 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3941 otherwise return null. */
3942
3943 static rtx
3944 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3945 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3946 HOST_WIDE_INT align)
3947 {
3948 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3949
3950 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3951 target = NULL_RTX;
3952
3953 struct expand_operand ops[5];
3954 create_output_operand (&ops[0], target, insn_mode);
3955 create_fixed_operand (&ops[1], arg1_rtx);
3956 create_fixed_operand (&ops[2], arg2_rtx);
3957 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3958 TYPE_UNSIGNED (arg3_type));
3959 create_integer_operand (&ops[4], align);
3960 if (maybe_expand_insn (icode, 5, ops))
3961 return ops[0].value;
3962 return NULL_RTX;
3963 }
3964
3965 /* Expand expression EXP, which is a call to the memcmp built-in function.
3966 Return NULL_RTX if we failed and the caller should emit a normal call,
3967 otherwise try to get the result in TARGET, if convenient. */
3968
3969 static rtx
3970 expand_builtin_memcmp (tree exp, rtx target)
3971 {
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3974 return NULL_RTX;
3975
3976 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3977 implementing memcmp because it will stop if it encounters two
3978 zero bytes. */
3979 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3980 if (icode == CODE_FOR_nothing)
3981 return NULL_RTX;
3982
3983 tree arg1 = CALL_EXPR_ARG (exp, 0);
3984 tree arg2 = CALL_EXPR_ARG (exp, 1);
3985 tree len = CALL_EXPR_ARG (exp, 2);
3986
3987 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3988 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3989
3990   /* If either argument's alignment is unknown (it is not a pointer), call the function.  */
3991 if (arg1_align == 0 || arg2_align == 0)
3992 return NULL_RTX;
3993
3994 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3995 location_t loc = EXPR_LOCATION (exp);
3996 rtx arg1_rtx = get_memory_rtx (arg1, len);
3997 rtx arg2_rtx = get_memory_rtx (arg2, len);
3998 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3999
4000 /* Set MEM_SIZE as appropriate. */
4001 if (CONST_INT_P (arg3_rtx))
4002 {
4003 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4004 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4005 }
4006
4007 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4008 TREE_TYPE (len), arg3_rtx,
4009 MIN (arg1_align, arg2_align));
4010 if (result)
4011 {
4012 /* Return the value in the proper mode for this function. */
4013 if (GET_MODE (result) == mode)
4014 return result;
4015
4016 if (target != 0)
4017 {
4018 convert_move (target, result, 0);
4019 return target;
4020 }
4021
4022 return convert_to_mode (mode, result, 0);
4023 }
4024
4025 result = target;
4026 if (! (result != 0
4027 && REG_P (result) && GET_MODE (result) == mode
4028 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4029 result = gen_reg_rtx (mode);
4030
4031 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4032 TYPE_MODE (integer_type_node), 3,
4033 XEXP (arg1_rtx, 0), Pmode,
4034 XEXP (arg2_rtx, 0), Pmode,
4035 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4036 TYPE_UNSIGNED (sizetype)),
4037 TYPE_MODE (sizetype));
4038 return result;
4039 }
4040
4041 /* Expand expression EXP, which is a call to the strcmp builtin.
4042    Return NULL_RTX if we failed; the caller should then emit a normal
4043    call.  Otherwise try to get the result in TARGET, if convenient.  */
4044
4045 static rtx
4046 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4047 {
4048 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4049 return NULL_RTX;
4050
4051 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4052 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4053 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4054 {
4055 rtx arg1_rtx, arg2_rtx;
4056 tree fndecl, fn;
4057 tree arg1 = CALL_EXPR_ARG (exp, 0);
4058 tree arg2 = CALL_EXPR_ARG (exp, 1);
4059 rtx result = NULL_RTX;
4060
4061 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4062 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4063
4064       /* If either argument's alignment is unknown (it is not a pointer), call the function.  */
4065 if (arg1_align == 0 || arg2_align == 0)
4066 return NULL_RTX;
4067
4068 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4069 arg1 = builtin_save_expr (arg1);
4070 arg2 = builtin_save_expr (arg2);
4071
4072 arg1_rtx = get_memory_rtx (arg1, NULL);
4073 arg2_rtx = get_memory_rtx (arg2, NULL);
4074
4075 /* Try to call cmpstrsi. */
4076 if (cmpstr_icode != CODE_FOR_nothing)
4077 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4078 MIN (arg1_align, arg2_align));
4079
4080 /* Try to determine at least one length and call cmpstrnsi. */
4081 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4082 {
4083 tree len;
4084 rtx arg3_rtx;
4085
4086 tree len1 = c_strlen (arg1, 1);
4087 tree len2 = c_strlen (arg2, 1);
4088
4089 if (len1)
4090 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4091 if (len2)
4092 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4093
4094 /* If we don't have a constant length for the first, use the length
4095 of the second, if we know it. We don't require a constant for
4096 this case; some cost analysis could be done if both are available
4097 but neither is constant. For now, assume they're equally cheap,
4098 unless one has side effects. If both strings have constant lengths,
4099 use the smaller. */
4100
4101 if (!len1)
4102 len = len2;
4103 else if (!len2)
4104 len = len1;
4105 else if (TREE_SIDE_EFFECTS (len1))
4106 len = len2;
4107 else if (TREE_SIDE_EFFECTS (len2))
4108 len = len1;
4109 else if (TREE_CODE (len1) != INTEGER_CST)
4110 len = len2;
4111 else if (TREE_CODE (len2) != INTEGER_CST)
4112 len = len1;
4113 else if (tree_int_cst_lt (len1, len2))
4114 len = len1;
4115 else
4116 len = len2;
4117
4118 /* If both arguments have side effects, we cannot optimize. */
4119 if (len && !TREE_SIDE_EFFECTS (len))
4120 {
4121 arg3_rtx = expand_normal (len);
4122 result = expand_cmpstrn_or_cmpmem
4123 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4124 arg3_rtx, MIN (arg1_align, arg2_align));
4125 }
4126 }
4127
4128 if (result)
4129 {
4130 /* Return the value in the proper mode for this function. */
4131 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4132 if (GET_MODE (result) == mode)
4133 return result;
4134 if (target == 0)
4135 return convert_to_mode (mode, result, 0);
4136 convert_move (target, result, 0);
4137 return target;
4138 }
4139
4140 /* Expand the library call ourselves using a stabilized argument
4141 list to avoid re-evaluating the function's arguments twice. */
4142 fndecl = get_callee_fndecl (exp);
4143 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4144 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4145 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4146 return expand_call (fn, target, target == const0_rtx);
4147 }
4148 return NULL_RTX;
4149 }
4150
4151 /* Expand expression EXP, which is a call to the strncmp builtin.
4152    Return NULL_RTX if we failed; the caller should then emit a normal
4153    call.  Otherwise try to get the result in TARGET, if convenient.  */
4154
4155 static rtx
4156 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4157 ATTRIBUTE_UNUSED machine_mode mode)
4158 {
4159 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4160
4161 if (!validate_arglist (exp,
4162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4163 return NULL_RTX;
4164
4165 /* If c_strlen can determine an expression for one of the string
4166 lengths, and it doesn't have side effects, then emit cmpstrnsi
4167 using length MIN(strlen(string)+1, arg3). */
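  /* E.g. for strncmp (s, "abc", 100) the comparison length becomes
     MIN (strlen ("abc") + 1, 100) == 4, since bytes past the NUL of
     the constant string cannot affect the result.  */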
4168 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4169 if (cmpstrn_icode != CODE_FOR_nothing)
4170 {
4171 tree len, len1, len2;
4172 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4173 rtx result;
4174 tree fndecl, fn;
4175 tree arg1 = CALL_EXPR_ARG (exp, 0);
4176 tree arg2 = CALL_EXPR_ARG (exp, 1);
4177 tree arg3 = CALL_EXPR_ARG (exp, 2);
4178
4179 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4180 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4181
4182 len1 = c_strlen (arg1, 1);
4183 len2 = c_strlen (arg2, 1);
4184
4185 if (len1)
4186 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4187 if (len2)
4188 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4189
4190 /* If we don't have a constant length for the first, use the length
4191 of the second, if we know it. We don't require a constant for
4192 this case; some cost analysis could be done if both are available
4193 but neither is constant. For now, assume they're equally cheap,
4194 unless one has side effects. If both strings have constant lengths,
4195 use the smaller. */
4196
4197 if (!len1)
4198 len = len2;
4199 else if (!len2)
4200 len = len1;
4201 else if (TREE_SIDE_EFFECTS (len1))
4202 len = len2;
4203 else if (TREE_SIDE_EFFECTS (len2))
4204 len = len1;
4205 else if (TREE_CODE (len1) != INTEGER_CST)
4206 len = len2;
4207 else if (TREE_CODE (len2) != INTEGER_CST)
4208 len = len1;
4209 else if (tree_int_cst_lt (len1, len2))
4210 len = len1;
4211 else
4212 len = len2;
4213
4214 /* If both arguments have side effects, we cannot optimize. */
4215 if (!len || TREE_SIDE_EFFECTS (len))
4216 return NULL_RTX;
4217
4218 /* The actual new length parameter is MIN(len,arg3). */
4219 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4220 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4221
4222       /* If either argument's alignment is unknown (it is not a pointer), call the function.  */
4223 if (arg1_align == 0 || arg2_align == 0)
4224 return NULL_RTX;
4225
4226 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4227 arg1 = builtin_save_expr (arg1);
4228 arg2 = builtin_save_expr (arg2);
4229 len = builtin_save_expr (len);
4230
4231 arg1_rtx = get_memory_rtx (arg1, len);
4232 arg2_rtx = get_memory_rtx (arg2, len);
4233 arg3_rtx = expand_normal (len);
4234 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4235 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4236 MIN (arg1_align, arg2_align));
4237 if (result)
4238 {
4239 /* Return the value in the proper mode for this function. */
4240 mode = TYPE_MODE (TREE_TYPE (exp));
4241 if (GET_MODE (result) == mode)
4242 return result;
4243 if (target == 0)
4244 return convert_to_mode (mode, result, 0);
4245 convert_move (target, result, 0);
4246 return target;
4247 }
4248
4249 /* Expand the library call ourselves using a stabilized argument
4250 list to avoid re-evaluating the function's arguments twice. */
4251 fndecl = get_callee_fndecl (exp);
4252 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4253 arg1, arg2, len);
4254 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4255 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4256 return expand_call (fn, target, target == const0_rtx);
4257 }
4258 return NULL_RTX;
4259 }
4260
4261 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4262 if that's convenient. */
4263
4264 rtx
4265 expand_builtin_saveregs (void)
4266 {
4267 rtx val;
4268 rtx_insn *seq;
4269
4270 /* Don't do __builtin_saveregs more than once in a function.
4271 Save the result of the first call and reuse it. */
4272 if (saveregs_value != 0)
4273 return saveregs_value;
4274
4275 /* When this function is called, it means that registers must be
4276 saved on entry to this function. So we migrate the call to the
4277 first insn of this function. */
4278
4279 start_sequence ();
4280
4281 /* Do whatever the machine needs done in this case. */
4282 val = targetm.calls.expand_builtin_saveregs ();
4283
4284 seq = get_insns ();
4285 end_sequence ();
4286
4287 saveregs_value = val;
4288
4289 /* Put the insns after the NOTE that starts the function. If this
4290 is inside a start_sequence, make the outer-level insn chain current, so
4291 the code is placed at the start of the function. */
4292 push_topmost_sequence ();
4293 emit_insn_after (seq, entry_of_function ());
4294 pop_topmost_sequence ();
4295
4296 return val;
4297 }
4298
4299 /* Expand a call to __builtin_next_arg. */
4300
4301 static rtx
4302 expand_builtin_next_arg (void)
4303 {
4304 /* Checking arguments is already done in fold_builtin_next_arg
4305 that must be called before this function. */
4306 return expand_binop (ptr_mode, add_optab,
4307 crtl->args.internal_arg_pointer,
4308 crtl->args.arg_offset_rtx,
4309 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4310 }
4311
4312 /* Make it easier for the backends by protecting the valist argument
4313 from multiple evaluations. */
4314
4315 static tree
4316 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4317 {
4318 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4319
4320 /* The current way of determining the type of valist is completely
4321 bogus. We should have the information on the va builtin instead. */
4322 if (!vatype)
4323 vatype = targetm.fn_abi_va_list (cfun->decl);
4324
4325 if (TREE_CODE (vatype) == ARRAY_TYPE)
4326 {
4327 if (TREE_SIDE_EFFECTS (valist))
4328 valist = save_expr (valist);
4329
4330 /* For this case, the backends will be expecting a pointer to
4331 vatype, but it's possible we've actually been given an array
4332 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4333 So fix it. */
4334 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4335 {
4336 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4337 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4338 }
4339 }
4340 else
4341 {
4342 tree pt = build_pointer_type (vatype);
4343
4344 if (! needs_lvalue)
4345 {
4346 if (! TREE_SIDE_EFFECTS (valist))
4347 return valist;
4348
4349 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4350 TREE_SIDE_EFFECTS (valist) = 1;
4351 }
4352
4353 if (TREE_SIDE_EFFECTS (valist))
4354 valist = save_expr (valist);
4355 valist = fold_build2_loc (loc, MEM_REF,
4356 vatype, valist, build_int_cst (pt, 0));
4357 }
4358
4359 return valist;
4360 }
4361
4362 /* The "standard" definition of va_list is void*. */
4363
4364 tree
4365 std_build_builtin_va_list (void)
4366 {
4367 return ptr_type_node;
4368 }
4369
4370 /* The "standard" abi va_list is va_list_type_node. */
4371
4372 tree
4373 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4374 {
4375 return va_list_type_node;
4376 }
4377
4378 /* The "standard" type of va_list is va_list_type_node. */
4379
4380 tree
4381 std_canonical_va_list_type (tree type)
4382 {
4383 tree wtype, htype;
4384
4385 if (INDIRECT_REF_P (type))
4386 type = TREE_TYPE (type);
4387 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4388 type = TREE_TYPE (type);
4389 wtype = va_list_type_node;
4390 htype = type;
4391 /* Treat structure va_list types. */
4392 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4393 htype = TREE_TYPE (htype);
4394 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4395 {
4396 /* If va_list is an array type, the argument may have decayed
4397 to a pointer type, e.g. by being passed to another function.
4398 In that case, unwrap both types so that we can compare the
4399 underlying records. */
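	  /* E.g. on x86_64 (where va_list is an array of one
	     __va_list_tag struct), a va_list parameter arrives as
	     __va_list_tag *, so both sides unwrap to __va_list_tag
	     before the comparison.  */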
4400 if (TREE_CODE (htype) == ARRAY_TYPE
4401 || POINTER_TYPE_P (htype))
4402 {
4403 wtype = TREE_TYPE (wtype);
4404 htype = TREE_TYPE (htype);
4405 }
4406 }
4407 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4408 return va_list_type_node;
4409
4410 return NULL_TREE;
4411 }
4412
4413 /* The "standard" implementation of va_start: just assign `nextarg' to
4414 the variable. */
4415
4416 void
4417 std_expand_builtin_va_start (tree valist, rtx nextarg)
4418 {
4419 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4420 convert_move (va_r, nextarg, 0);
4421
4422 /* We do not have any valid bounds for the pointer, so
4423 just store zero bounds for it. */
4424 if (chkp_function_instrumented_p (current_function_decl))
4425 chkp_expand_bounds_reset_for_mem (valist,
4426 make_tree (TREE_TYPE (valist),
4427 nextarg));
4428 }
4429
4430 /* Expand EXP, a call to __builtin_va_start. */
4431
4432 static rtx
4433 expand_builtin_va_start (tree exp)
4434 {
4435 rtx nextarg;
4436 tree valist;
4437 location_t loc = EXPR_LOCATION (exp);
4438
4439 if (call_expr_nargs (exp) < 2)
4440 {
4441 error_at (loc, "too few arguments to function %<va_start%>");
4442 return const0_rtx;
4443 }
4444
4445 if (fold_builtin_next_arg (exp, true))
4446 return const0_rtx;
4447
4448 nextarg = expand_builtin_next_arg ();
4449 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4450
4451 if (targetm.expand_builtin_va_start)
4452 targetm.expand_builtin_va_start (valist, nextarg);
4453 else
4454 std_expand_builtin_va_start (valist, nextarg);
4455
4456 return const0_rtx;
4457 }
4458
4459 /* Expand EXP, a call to __builtin_va_end. */
4460
4461 static rtx
4462 expand_builtin_va_end (tree exp)
4463 {
4464 tree valist = CALL_EXPR_ARG (exp, 0);
4465
4466 /* Evaluate for side effects, if needed. I hate macros that don't
4467 do that. */
4468 if (TREE_SIDE_EFFECTS (valist))
4469 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4470
4471 return const0_rtx;
4472 }
4473
4474 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4475 builtin rather than just as an assignment in stdarg.h because of the
4476 nastiness of array-type va_list types. */
4477
4478 static rtx
4479 expand_builtin_va_copy (tree exp)
4480 {
4481 tree dst, src, t;
4482 location_t loc = EXPR_LOCATION (exp);
4483
4484 dst = CALL_EXPR_ARG (exp, 0);
4485 src = CALL_EXPR_ARG (exp, 1);
4486
4487 dst = stabilize_va_list_loc (loc, dst, 1);
4488 src = stabilize_va_list_loc (loc, src, 0);
4489
4490 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4491
4492 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4493 {
4494 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4495 TREE_SIDE_EFFECTS (t) = 1;
4496 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4497 }
4498 else
4499 {
4500 rtx dstb, srcb, size;
4501
4502 /* Evaluate to pointers. */
4503 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4504 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4505 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4506 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4507
4508 dstb = convert_memory_address (Pmode, dstb);
4509 srcb = convert_memory_address (Pmode, srcb);
4510
4511 /* "Dereference" to BLKmode memories. */
4512 dstb = gen_rtx_MEM (BLKmode, dstb);
4513 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4514 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4515 srcb = gen_rtx_MEM (BLKmode, srcb);
4516 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4517 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4518
4519 /* Copy. */
4520 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4521 }
4522
4523 return const0_rtx;
4524 }
4525
4526 /* Expand a call to one of the builtin functions __builtin_frame_address or
4527 __builtin_return_address. */
4528
4529 static rtx
4530 expand_builtin_frame_address (tree fndecl, tree exp)
4531 {
4532 /* The argument must be a nonnegative integer constant.
4533 It counts the number of frames to scan up the stack.
4534 The value is either the frame pointer value or the return
4535 address saved in that frame. */
4536 if (call_expr_nargs (exp) == 0)
4537 /* Warning about missing arg was already issued. */
4538 return const0_rtx;
4539 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4540 {
4541 error ("invalid argument to %qD", fndecl);
4542 return const0_rtx;
4543 }
4544 else
4545 {
4546 /* Number of frames to scan up the stack. */
4547 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4548
4549 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4550
4551 /* Some ports cannot access arbitrary stack frames. */
4552 if (tem == NULL)
4553 {
4554 warning (0, "unsupported argument to %qD", fndecl);
4555 return const0_rtx;
4556 }
4557
4558 if (count)
4559 {
4560 /* Warn since no effort is made to ensure that any frame
4561 beyond the current one exists or can be safely reached. */
4562 warning (OPT_Wframe_address, "calling %qD with "
4563 "a nonzero argument is unsafe", fndecl);
4564 }
4565
4566 /* For __builtin_frame_address, return what we've got. */
4567 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4568 return tem;
4569
4570 if (!REG_P (tem)
4571 && ! CONSTANT_P (tem))
4572 tem = copy_addr_to_reg (tem);
4573 return tem;
4574 }
4575 }
4576
4577 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4578 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4579 is the same as for allocate_dynamic_stack_space. */
4580
4581 static rtx
4582 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4583 {
4584 rtx op0;
4585 rtx result;
4586 bool valid_arglist;
4587 unsigned int align;
4588 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4589 == BUILT_IN_ALLOCA_WITH_ALIGN);
4590
4591 valid_arglist
4592 = (alloca_with_align
4593 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4594 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4595
4596 if (!valid_arglist)
4597 return NULL_RTX;
4598
4599 /* Compute the argument. */
4600 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4601
4602 /* Compute the alignment. */
4603 align = (alloca_with_align
4604 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4605 : BIGGEST_ALIGNMENT);
4606
4607 /* Allocate the desired space. */
4608 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4609 result = convert_memory_address (ptr_mode, result);
4610
4611 return result;
4612 }
4613
4614 /* Expand a call to bswap builtin in EXP.
4615 Return NULL_RTX if a normal call should be emitted rather than expanding the
4616 function in-line. If convenient, the result should be placed in TARGET.
4617 SUBTARGET may be used as the target for computing one of EXP's operands. */
4618
4619 static rtx
4620 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4621 rtx subtarget)
4622 {
4623 tree arg;
4624 rtx op0;
4625
4626 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4627 return NULL_RTX;
4628
4629 arg = CALL_EXPR_ARG (exp, 0);
4630 op0 = expand_expr (arg,
4631 subtarget && GET_MODE (subtarget) == target_mode
4632 ? subtarget : NULL_RTX,
4633 target_mode, EXPAND_NORMAL);
4634 if (GET_MODE (op0) != target_mode)
4635 op0 = convert_to_mode (target_mode, op0, 1);
4636
4637 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4638
4639 gcc_assert (target);
4640
4641 return convert_to_mode (target_mode, target, 1);
4642 }
4643
4644 /* Expand a call to a unary builtin in EXP.
4645 Return NULL_RTX if a normal call should be emitted rather than expanding the
4646 function in-line. If convenient, the result should be placed in TARGET.
4647 SUBTARGET may be used as the target for computing one of EXP's operands. */
4648
4649 static rtx
4650 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4651 rtx subtarget, optab op_optab)
4652 {
4653 rtx op0;
4654
4655 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4656 return NULL_RTX;
4657
4658 /* Compute the argument. */
4659 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4660 (subtarget
4661 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4662 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4663 VOIDmode, EXPAND_NORMAL);
4664 /* Compute op, into TARGET if possible.
4665 Set TARGET to wherever the result comes back. */
4666 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4667 op_optab, op0, target, op_optab != clrsb_optab);
4668 gcc_assert (target);
4669
4670 return convert_to_mode (target_mode, target, 0);
4671 }
4672
4673 /* Expand a call to __builtin_expect.  We just return our argument,
4674    as the builtin_expect semantics should already have been handled
4675    by the tree branch prediction pass.  */
4676
4677 static rtx
4678 expand_builtin_expect (tree exp, rtx target)
4679 {
4680 tree arg;
4681
4682 if (call_expr_nargs (exp) < 2)
4683 return const0_rtx;
4684 arg = CALL_EXPR_ARG (exp, 0);
4685
4686 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4687   /* When guessing was done, the hints should already have been stripped away.  */
4688 gcc_assert (!flag_guess_branch_prob
4689 || optimize == 0 || seen_error ());
4690 return target;
4691 }
4692
4693 /* Expand a call to __builtin_assume_aligned.  We just return our first
4694    argument, as the builtin_assume_aligned semantics should already have
4695    been handled by CCP.  */
4696
4697 static rtx
4698 expand_builtin_assume_aligned (tree exp, rtx target)
4699 {
4700 if (call_expr_nargs (exp) < 2)
4701 return const0_rtx;
4702 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4703 EXPAND_NORMAL);
4704 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4705 && (call_expr_nargs (exp) < 3
4706 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4707 return target;
4708 }
4709
4710 void
4711 expand_builtin_trap (void)
4712 {
4713 if (targetm.have_trap ())
4714 {
4715 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4716 /* For trap insns when not accumulating outgoing args force
4717 REG_ARGS_SIZE note to prevent crossjumping of calls with
4718 different args sizes. */
4719 if (!ACCUMULATE_OUTGOING_ARGS)
4720 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4721 }
4722 else
4723 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4724 emit_barrier ();
4725 }
4726
4727 /* Expand a call to __builtin_unreachable. We do nothing except emit
4728 a barrier saying that control flow will not pass here.
4729
4730    It is the responsibility of the program being compiled to ensure
4731    that control flow never reaches __builtin_unreachable.  */
4732 static void
4733 expand_builtin_unreachable (void)
4734 {
4735 emit_barrier ();
4736 }
4737
4738 /* Expand EXP, a call to fabs, fabsf or fabsl.
4739 Return NULL_RTX if a normal call should be emitted rather than expanding
4740 the function inline. If convenient, the result should be placed
4741 in TARGET. SUBTARGET may be used as the target for computing
4742 the operand. */
4743
4744 static rtx
4745 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4746 {
4747 machine_mode mode;
4748 tree arg;
4749 rtx op0;
4750
4751 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4752 return NULL_RTX;
4753
4754 arg = CALL_EXPR_ARG (exp, 0);
4755 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4756 mode = TYPE_MODE (TREE_TYPE (arg));
4757 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4758 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4759 }
4760
4761 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4762    Return NULL if a normal call should be emitted rather than expanding the
4763 function inline. If convenient, the result should be placed in TARGET.
4764 SUBTARGET may be used as the target for computing the operand. */
4765
4766 static rtx
4767 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4768 {
4769 rtx op0, op1;
4770 tree arg;
4771
4772 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4773 return NULL_RTX;
4774
4775 arg = CALL_EXPR_ARG (exp, 0);
4776 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4777
4778 arg = CALL_EXPR_ARG (exp, 1);
4779 op1 = expand_normal (arg);
4780
4781 return expand_copysign (op0, op1, target);
4782 }
4783
4784 /* Expand a call to __builtin___clear_cache. */
4785
4786 static rtx
4787 expand_builtin___clear_cache (tree exp)
4788 {
4789 if (!targetm.code_for_clear_cache)
4790 {
4791 #ifdef CLEAR_INSN_CACHE
4792 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4793 does something. Just do the default expansion to a call to
4794 __clear_cache(). */
4795 return NULL_RTX;
4796 #else
4797 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4798 does nothing. There is no need to call it. Do nothing. */
4799 return const0_rtx;
4800 #endif /* CLEAR_INSN_CACHE */
4801 }
4802
4803 /* We have a "clear_cache" insn, and it will handle everything. */
4804 tree begin, end;
4805 rtx begin_rtx, end_rtx;
4806
4807 /* We must not expand to a library call. If we did, any
4808 fallback library function in libgcc that might contain a call to
4809 __builtin___clear_cache() would recurse infinitely. */
4810 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4811 {
4812 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4813 return const0_rtx;
4814 }
4815
4816 if (targetm.have_clear_cache ())
4817 {
4818 struct expand_operand ops[2];
4819
4820 begin = CALL_EXPR_ARG (exp, 0);
4821 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4822
4823 end = CALL_EXPR_ARG (exp, 1);
4824 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4825
4826 create_address_operand (&ops[0], begin_rtx);
4827 create_address_operand (&ops[1], end_rtx);
4828 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4829 return const0_rtx;
4830 }
4831 return const0_rtx;
4832 }
4833
4834 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4835
4836 static rtx
4837 round_trampoline_addr (rtx tramp)
4838 {
4839 rtx temp, addend, mask;
4840
4841 /* If we don't need too much alignment, we'll have been guaranteed
4842 proper alignment by get_trampoline_type. */
4843 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4844 return tramp;
4845
4846 /* Round address up to desired boundary. */
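  /* E.g. with a TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes), ADDEND is
     15 and MASK is -16, so (TRAMP + 15) & -16 rounds TRAMP up to the
     next 16-byte boundary.  */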
4847 temp = gen_reg_rtx (Pmode);
4848 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4849 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4850
4851 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4852 temp, 0, OPTAB_LIB_WIDEN);
4853 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855
4856 return tramp;
4857 }
4858
4859 static rtx
4860 expand_builtin_init_trampoline (tree exp, bool onstack)
4861 {
4862 tree t_tramp, t_func, t_chain;
4863 rtx m_tramp, r_tramp, r_chain, tmp;
4864
4865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4866 POINTER_TYPE, VOID_TYPE))
4867 return NULL_RTX;
4868
4869 t_tramp = CALL_EXPR_ARG (exp, 0);
4870 t_func = CALL_EXPR_ARG (exp, 1);
4871 t_chain = CALL_EXPR_ARG (exp, 2);
4872
4873 r_tramp = expand_normal (t_tramp);
4874 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4875 MEM_NOTRAP_P (m_tramp) = 1;
4876
4877 /* If ONSTACK, the TRAMP argument should be the address of a field
4878 within the local function's FRAME decl. Either way, let's see if
4879 we can fill in the MEM_ATTRs for this memory. */
4880 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4881 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4882
4883 /* Creator of a heap trampoline is responsible for making sure the
4884 address is aligned to at least STACK_BOUNDARY. Normally malloc
4885 will ensure this anyhow. */
4886 tmp = round_trampoline_addr (r_tramp);
4887 if (tmp != r_tramp)
4888 {
4889 m_tramp = change_address (m_tramp, BLKmode, tmp);
4890 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4891 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4892 }
4893
4894 /* The FUNC argument should be the address of the nested function.
4895 Extract the actual function decl to pass to the hook. */
4896 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4897 t_func = TREE_OPERAND (t_func, 0);
4898 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4899
4900 r_chain = expand_normal (t_chain);
4901
4902 /* Generate insns to initialize the trampoline. */
4903 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4904
4905 if (onstack)
4906 {
4907 trampolines_created = 1;
4908
4909 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4910 "trampoline generated for nested function %qD", t_func);
4911 }
4912
4913 return const0_rtx;
4914 }
4915
4916 static rtx
4917 expand_builtin_adjust_trampoline (tree exp)
4918 {
4919 rtx tramp;
4920
4921 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4922 return NULL_RTX;
4923
4924 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4925 tramp = round_trampoline_addr (tramp);
4926 if (targetm.calls.trampoline_adjust_address)
4927 tramp = targetm.calls.trampoline_adjust_address (tramp);
4928
4929 return tramp;
4930 }
4931
4932 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4933 function. The function first checks whether the back end provides
4934 an insn to implement signbit for the respective mode. If not, it
4935 checks whether the floating point format of the value is such that
4936 the sign bit can be extracted. If that is not the case, error out.
4937 EXP is the expression that is a call to the builtin function; if
4938 convenient, the result should be placed in TARGET. */
4939 static rtx
4940 expand_builtin_signbit (tree exp, rtx target)
4941 {
4942 const struct real_format *fmt;
4943 machine_mode fmode, imode, rmode;
4944 tree arg;
4945 int word, bitpos;
4946 enum insn_code icode;
4947 rtx temp;
4948 location_t loc = EXPR_LOCATION (exp);
4949
4950 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4951 return NULL_RTX;
4952
4953 arg = CALL_EXPR_ARG (exp, 0);
4954 fmode = TYPE_MODE (TREE_TYPE (arg));
4955 rmode = TYPE_MODE (TREE_TYPE (exp));
4956 fmt = REAL_MODE_FORMAT (fmode);
4957
4958 arg = builtin_save_expr (arg);
4959
4960 /* Expand the argument yielding a RTX expression. */
4961 temp = expand_normal (arg);
4962
4963 /* Check if the back end provides an insn that handles signbit for the
4964 argument's mode. */
4965 icode = optab_handler (signbit_optab, fmode);
4966 if (icode != CODE_FOR_nothing)
4967 {
4968 rtx_insn *last = get_last_insn ();
4969 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4970 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4971 return target;
4972 delete_insns_since (last);
4973 }
4974
4975 /* For floating point formats without a sign bit, implement signbit
4976 as "ARG < 0.0". */
4977 bitpos = fmt->signbit_ro;
4978 if (bitpos < 0)
4979 {
4980 /* But we can't do this if the format supports signed zero. */
4981 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4982
4983 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4984 build_real (TREE_TYPE (arg), dconst0));
4985 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4986 }
4987
4988 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4989 {
4990 imode = int_mode_for_mode (fmode);
4991 gcc_assert (imode != BLKmode);
4992 temp = gen_lowpart (imode, temp);
4993 }
4994 else
4995 {
4996 imode = word_mode;
4997 /* Handle targets with different FP word orders. */
4998 if (FLOAT_WORDS_BIG_ENDIAN)
4999 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5000 else
5001 word = bitpos / BITS_PER_WORD;
5002 temp = operand_subword_force (temp, word, fmode);
5003 bitpos = bitpos % BITS_PER_WORD;
5004 }
5005
5006 /* Force the intermediate word_mode (or narrower) result into a
5007 register. This avoids attempting to create paradoxical SUBREGs
5008 of floating point modes below. */
5009 temp = force_reg (imode, temp);
5010
5011   /* If the bitpos is within the "result mode" lowpart, the operation
5012      can be implemented with a single bitwise AND.  Otherwise, we need
5013      a right shift and an AND.  */
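  /* E.g. for IEEE single precision, BITPOS is 31; with a 32-bit RMODE
     this takes the AND path with MASK == 0x80000000.  */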
5014
5015 if (bitpos < GET_MODE_BITSIZE (rmode))
5016 {
5017 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5018
5019 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5020 temp = gen_lowpart (rmode, temp);
5021 temp = expand_binop (rmode, and_optab, temp,
5022 immed_wide_int_const (mask, rmode),
5023 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5024 }
5025 else
5026 {
5027 /* Perform a logical right shift to place the signbit in the least
5028 significant bit, then truncate the result to the desired mode
5029 and mask just this bit. */
5030 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5031 temp = gen_lowpart (rmode, temp);
5032 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5033 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5034 }
5035
5036 return temp;
5037 }
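/* Illustrative sketch (not part of GCC) of the two fallback strategies
   above, assuming IEEE single precision with the sign in bit 31 and a
   32-bit int result mode.  Since bit 31 lies within the result mode,
   a single AND suffices:

     uint32_t bits;
     memcpy (&bits, &f, sizeof bits);   // the gen_lowpart step
     return bits & 0x80000000u;         // nonzero iff the sign bit is set

   For IEEE double with a 32-bit result mode the sign bit (bit 63) falls
   outside the result mode, so the shift-then-AND branch applies:

     return (uint32_t) (bits64 >> 63) & 1;  */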
5038
5039 /* Expand fork or exec calls. TARGET is the desired target of the
5040 call. EXP is the call. FN is the
5041 identifier of the actual function. IGNORE is nonzero if the
5042 value is to be ignored. */
5043
5044 static rtx
5045 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5046 {
5047 tree id, decl;
5048 tree call;
5049
5050 /* If we are not profiling, just call the function. */
5051 if (!profile_arc_flag)
5052 return NULL_RTX;
5053
5054 /* Otherwise call the wrapper. To the rest of the compiler this should be
5055 equivalent, so the generated code does not diverge, and the wrapper can
5056 run whatever code is needed to keep the profiling data sane. */
5057
5058 switch (DECL_FUNCTION_CODE (fn))
5059 {
5060 case BUILT_IN_FORK:
5061 id = get_identifier ("__gcov_fork");
5062 break;
5063
5064 case BUILT_IN_EXECL:
5065 id = get_identifier ("__gcov_execl");
5066 break;
5067
5068 case BUILT_IN_EXECV:
5069 id = get_identifier ("__gcov_execv");
5070 break;
5071
5072 case BUILT_IN_EXECLP:
5073 id = get_identifier ("__gcov_execlp");
5074 break;
5075
5076 case BUILT_IN_EXECLE:
5077 id = get_identifier ("__gcov_execle");
5078 break;
5079
5080 case BUILT_IN_EXECVP:
5081 id = get_identifier ("__gcov_execvp");
5082 break;
5083
5084 case BUILT_IN_EXECVE:
5085 id = get_identifier ("__gcov_execve");
5086 break;
5087
5088 default:
5089 gcc_unreachable ();
5090 }
5091
5092 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5093 FUNCTION_DECL, id, TREE_TYPE (fn));
5094 DECL_EXTERNAL (decl) = 1;
5095 TREE_PUBLIC (decl) = 1;
5096 DECL_ARTIFICIAL (decl) = 1;
5097 TREE_NOTHROW (decl) = 1;
5098 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5099 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5100 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5101 return expand_call (call, target, ignore);
5102 }
5103
5104
5105 \f
5106 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5107 the pointer in these functions is void*, the tree optimizers may remove
5108 casts. The mode computed in expand_builtin isn't reliable either, due
5109 to __sync_bool_compare_and_swap.
5110
5111 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5112 group of builtins. This gives us log2 of the mode size. */
5113
5114 static inline machine_mode
5115 get_builtin_sync_mode (int fcode_diff)
5116 {
5117 /* The size is not negotiable, so ask not to get BLKmode in return
5118 if the target indicates that a smaller size would be better. */
5119 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5120 }
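/* Worked example (illustrative): the _1/_2/_4/_8/_16 variants are
   consecutive, so BUILT_IN_SYNC_FETCH_AND_ADD_4 minus
   BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, giving BITS_PER_UNIT << 2 == 32
   bits, i.e. SImode on a typical target; a difference of 3 likewise
   yields 64 bits (DImode).  */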
5121
5122 /* Expand the memory expression LOC and return the appropriate memory operand
5123 for the builtin_sync operations. */
5124
5125 static rtx
5126 get_builtin_sync_mem (tree loc, machine_mode mode)
5127 {
5128 rtx addr, mem;
5129
5130 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5131 addr = convert_memory_address (Pmode, addr);
5132
5133 /* Note that we explicitly do not want any alias information for this
5134 memory, so that we kill all other live memories. Otherwise we don't
5135 satisfy the full barrier semantics of the intrinsic. */
5136 mem = validize_mem (gen_rtx_MEM (mode, addr));
5137
5138 /* The alignment must be at least that required by the mode. */
5139 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5140 get_pointer_alignment (loc)));
5141 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5142 MEM_VOLATILE_P (mem) = 1;
5143
5144 return mem;
5145 }
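/* A note on the effect of the above (descriptive, not normative): the
   returned MEM is volatile and lives in ALIAS_SET_MEMORY_BARRIER, so the
   RTL optimizers treat it as conflicting with every other memory access
   and will not move unrelated loads or stores across the atomic
   operation, which is what the full-barrier semantics require.  */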
5146
5147 /* Make sure an argument is in the right mode.
5148 EXP is the tree argument.
5149 MODE is the mode it should be in. */
5150
5151 static rtx
5152 expand_expr_force_mode (tree exp, machine_mode mode)
5153 {
5154 rtx val;
5155 machine_mode old_mode;
5156
5157 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5158 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5159 of CONST_INTs, where we know the old_mode only from the call argument. */
5160
5161 old_mode = GET_MODE (val);
5162 if (old_mode == VOIDmode)
5163 old_mode = TYPE_MODE (TREE_TYPE (exp));
5164 val = convert_modes (mode, old_mode, val, 1);
5165 return val;
5166 }
5167
5168
5169 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5170 EXP is the CALL_EXPR. CODE is the rtx code
5171 that corresponds to the arithmetic or logical operation from the name;
5172 an exception here is that NOT actually means NAND. TARGET is an optional
5173 place for us to store the results; AFTER is true if this is the
5174 fetch_and_xxx form. */
5175
5176 static rtx
5177 expand_builtin_sync_operation (machine_mode mode, tree exp,
5178 enum rtx_code code, bool after,
5179 rtx target)
5180 {
5181 rtx val, mem;
5182 location_t loc = EXPR_LOCATION (exp);
5183
5184 if (code == NOT && warn_sync_nand)
5185 {
5186 tree fndecl = get_callee_fndecl (exp);
5187 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5188
5189 static bool warned_f_a_n, warned_n_a_f;
5190
5191 switch (fcode)
5192 {
5193 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5194 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5195 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5198 if (warned_f_a_n)
5199 break;
5200
5201 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5202 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5203 warned_f_a_n = true;
5204 break;
5205
5206 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5207 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5208 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5211 if (warned_n_a_f)
5212 break;
5213
5214 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5215 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5216 warned_n_a_f = true;
5217 break;
5218
5219 default:
5220 gcc_unreachable ();
5221 }
5222 }
5223
5224 /* Expand the operands. */
5225 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5226 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5227
5228 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5229 after);
5230 }
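/* Illustration of the GCC 4.4 semantic change warned about above, for a
   hypothetical 8-bit location P and value V:

     // Before GCC 4.4:  *p = ~*p & v;
     // Since GCC 4.4:   *p = ~(*p & v);   (the usual NAND definition)

   With *p == 0xFF and v == 0x0F, __sync_nand_and_fetch formerly left
   0x00 in *p and now leaves 0xF0.  */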
5231
5232 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5233 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5234 true if this is the boolean form. TARGET is a place for us to store the
5235 results; this is NOT optional if IS_BOOL is true. */
5236
5237 static rtx
5238 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5239 bool is_bool, rtx target)
5240 {
5241 rtx old_val, new_val, mem;
5242 rtx *pbool, *poval;
5243
5244 /* Expand the operands. */
5245 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5246 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5247 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5248
5249 pbool = poval = NULL;
5250 if (target != const0_rtx)
5251 {
5252 if (is_bool)
5253 pbool = &target;
5254 else
5255 poval = &target;
5256 }
5257 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5258 false, MEMMODEL_SYNC_SEQ_CST,
5259 MEMMODEL_SYNC_SEQ_CST))
5260 return NULL_RTX;
5261
5262 return target;
5263 }
5264
5265 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5266 general form is actually an atomic exchange, and some targets only
5267 support a reduced form with the second argument being a constant 1.
5268 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5269 the results. */
5270
5271 static rtx
5272 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5273 rtx target)
5274 {
5275 rtx val, mem;
5276
5277 /* Expand the operands. */
5278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5279 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5280
5281 return expand_sync_lock_test_and_set (target, mem, val);
5282 }
5283
5284 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5285
5286 static void
5287 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5288 {
5289 rtx mem;
5290
5291 /* Expand the operands. */
5292 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5293
5294 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5295 }
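/* A classic (illustrative) use of the two builtins above as a simple
   spinlock; LOCK is a hypothetical int flag where 0 means free:

     while (__sync_lock_test_and_set (&lock, 1))   // acquire semantics
       while (lock)
         ;                                         // spin while held
     // ... critical section ...
     __sync_lock_release (&lock);                  // release; stores 0
*/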
5296
5297 /* Given an integer representing an ``enum memmodel'', verify its
5298 correctness and return the memory model enum. */
5299
5300 static enum memmodel
5301 get_memmodel (tree exp)
5302 {
5303 rtx op;
5304 unsigned HOST_WIDE_INT val;
5305
5306 /* If the parameter is not a constant, it's a run time value so we'll just
5307 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5308 if (TREE_CODE (exp) != INTEGER_CST)
5309 return MEMMODEL_SEQ_CST;
5310
5311 op = expand_normal (exp);
5312
5313 val = INTVAL (op);
5314 if (targetm.memmodel_check)
5315 val = targetm.memmodel_check (val);
5316 else if (val & ~MEMMODEL_MASK)
5317 {
5318 warning (OPT_Winvalid_memory_model,
5319 "unknown architecture specifier in memory model to builtin");
5320 return MEMMODEL_SEQ_CST;
5321 }
5322
5323 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5324 if (memmodel_base (val) >= MEMMODEL_LAST)
5325 {
5326 warning (OPT_Winvalid_memory_model,
5327 "invalid memory model argument to builtin");
5328 return MEMMODEL_SEQ_CST;
5329 }
5330
5331 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5332 be conservative and promote consume to acquire. */
5333 if (val == MEMMODEL_CONSUME)
5334 val = MEMMODEL_ACQUIRE;
5335
5336 return (enum memmodel) val;
5337 }
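/* For example (illustrative), __atomic_load_n (p, __ATOMIC_CONSUME)
   arrives here with the INTEGER_CST 1 and is promoted to
   MEMMODEL_ACQUIRE by the workaround above, while a model argument that
   is not a compile-time constant is conservatively treated as
   MEMMODEL_SEQ_CST.  */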
5338
5339 /* Expand the __atomic_exchange intrinsic:
5340 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5341 EXP is the CALL_EXPR.
5342 TARGET is an optional place for us to store the results. */
5343
5344 static rtx
5345 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5346 {
5347 rtx val, mem;
5348 enum memmodel model;
5349
5350 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5351
5352 if (!flag_inline_atomics)
5353 return NULL_RTX;
5354
5355 /* Expand the operands. */
5356 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5358
5359 return expand_atomic_exchange (target, mem, val, model);
5360 }
5361
5362 /* Expand the __atomic_compare_exchange intrinsic:
5363 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5364 TYPE desired, BOOL weak,
5365 enum memmodel success,
5366 enum memmodel failure)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5369
5370 static rtx
5371 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5372 rtx target)
5373 {
5374 rtx expect, desired, mem, oldval;
5375 rtx_code_label *label;
5376 enum memmodel success, failure;
5377 tree weak;
5378 bool is_weak;
5379
5380 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5381 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5382
5383 if (failure > success)
5384 {
5385 warning (OPT_Winvalid_memory_model,
5386 "failure memory model cannot be stronger than success memory "
5387 "model for %<__atomic_compare_exchange%>");
5388 success = MEMMODEL_SEQ_CST;
5389 }
5390
5391 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5392 {
5393 warning (OPT_Winvalid_memory_model,
5394 "invalid failure memory model for "
5395 "%<__atomic_compare_exchange%>");
5396 failure = MEMMODEL_SEQ_CST;
5397 success = MEMMODEL_SEQ_CST;
5398 }
5399
5400
5401 if (!flag_inline_atomics)
5402 return NULL_RTX;
5403
5404 /* Expand the operands. */
5405 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5406
5407 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5408 expect = convert_memory_address (Pmode, expect);
5409 expect = gen_rtx_MEM (mode, expect);
5410 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5411
5412 weak = CALL_EXPR_ARG (exp, 3);
5413 is_weak = false;
5414 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5415 is_weak = true;
5416
5417 if (target == const0_rtx)
5418 target = NULL;
5419
5420 /* Lest the rtl backend create a race condition with an improper store
5421 to memory, always create a new pseudo for OLDVAL. */
5422 oldval = NULL;
5423
5424 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5425 is_weak, success, failure))
5426 return NULL_RTX;
5427
5428 /* Conditionally store back to EXPECT, lest we create a race condition
5429 with an improper store to memory. */
5430 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5431 the normal case where EXPECT is totally private, i.e. a register. At
5432 which point the store can be unconditional. */
5433 label = gen_label_rtx ();
5434 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5435 GET_MODE (target), 1, label);
5436 emit_move_insn (expect, oldval);
5437 emit_label (label);
5438
5439 return target;
5440 }
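/* For reference, an illustrative source-level retry loop served by this
   expander; OBJ, EXPECTED and DESIRED are hypothetical:

     int expected = 0;
     while (!__atomic_compare_exchange_n (&obj, &expected, desired,
                                          false,            // strong CAS
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;   // on failure, EXPECTED now holds the current value of OBJ

   The conditional store back to EXPECT emitted above is precisely what
   implements that update-on-failure behaviour.  */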
5441
5442 /* Expand the __atomic_load intrinsic:
5443 TYPE __atomic_load (TYPE *object, enum memmodel)
5444 EXP is the CALL_EXPR.
5445 TARGET is an optional place for us to store the results. */
5446
5447 static rtx
5448 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5449 {
5450 rtx mem;
5451 enum memmodel model;
5452
5453 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5454 if (is_mm_release (model) || is_mm_acq_rel (model))
5455 {
5456 warning (OPT_Winvalid_memory_model,
5457 "invalid memory model for %<__atomic_load%>");
5458 model = MEMMODEL_SEQ_CST;
5459 }
5460
5461 if (!flag_inline_atomics)
5462 return NULL_RTX;
5463
5464 /* Expand the operand. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5466
5467 return expand_atomic_load (target, mem, model);
5468 }
5469
5470
5471 /* Expand the __atomic_store intrinsic:
5472 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5473 EXP is the CALL_EXPR.
5474 TARGET is an optional place for us to store the results. */
5475
5476 static rtx
5477 expand_builtin_atomic_store (machine_mode mode, tree exp)
5478 {
5479 rtx mem, val;
5480 enum memmodel model;
5481
5482 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5483 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5484 || is_mm_release (model)))
5485 {
5486 warning (OPT_Winvalid_memory_model,
5487 "invalid memory model for %<__atomic_store%>");
5488 model = MEMMODEL_SEQ_CST;
5489 }
5490
5491 if (!flag_inline_atomics)
5492 return NULL_RTX;
5493
5494 /* Expand the operands. */
5495 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5496 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497
5498 return expand_atomic_store (mem, val, model, false);
5499 }
5500
5501 /* Expand the __atomic_fetch_XXX intrinsic:
5502 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5503 EXP is the CALL_EXPR.
5504 TARGET is an optional place for us to store the results.
5505 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5506 FETCH_AFTER is true if returning the result of the operation.
5507 FETCH_AFTER is false if returning the value before the operation.
5508 IGNORE is true if the result is not used.
5509 EXT_CALL is the correct builtin for an external call if this cannot be
5510 resolved to an instruction sequence. */
5511
5512 static rtx
5513 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5514 enum rtx_code code, bool fetch_after,
5515 bool ignore, enum built_in_function ext_call)
5516 {
5517 rtx val, mem, ret;
5518 enum memmodel model;
5519 tree fndecl;
5520 tree addr;
5521
5522 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5523
5524 /* Expand the operands. */
5525 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5526 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5527
5528 /* Only try generating instructions if inlining is turned on. */
5529 if (flag_inline_atomics)
5530 {
5531 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5532 if (ret)
5533 return ret;
5534 }
5535
5536 /* If there is no library routine to fall back to, give up. */
5537 if (ext_call == BUILT_IN_NONE)
5538 return NULL_RTX;
5539
5540 /* Change the call to the specified function. */
5541 fndecl = get_callee_fndecl (exp);
5542 addr = CALL_EXPR_FN (exp);
5543 STRIP_NOPS (addr);
5544
5545 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5546 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5547
5548 /* Expand the call here so we can emit trailing code. */
5549 ret = expand_call (exp, target, ignore);
5550
5551 /* Replace the original function just in case it matters. */
5552 TREE_OPERAND (addr, 0) = fndecl;
5553
5554 /* Then issue the arithmetic correction to return the right result. */
5555 if (!ignore)
5556 {
5557 if (code == NOT)
5558 {
5559 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5560 OPTAB_LIB_WIDEN);
5561 ret = expand_simple_unop (mode, NOT, ret, target, true);
5562 }
5563 else
5564 ret = expand_simple_binop (mode, code, ret, val, target, true,
5565 OPTAB_LIB_WIDEN);
5566 }
5567 return ret;
5568 }
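/* Worked example of the trailing correction (illustrative): the library
   fallback for __atomic_add_fetch is __atomic_fetch_add, which returns
   the old value OLD, so the binop above recomputes OLD + VAL; for the
   NAND forms the result is ~(OLD & VAL), hence the AND followed by the
   NOT.  */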
5569
5570 /* Expand an atomic clear operation.
5571 void __atomic_clear (BOOL *obj, enum memmodel)
5572 EXP is the call expression. */
5573
5574 static rtx
5575 expand_builtin_atomic_clear (tree exp)
5576 {
5577 machine_mode mode;
5578 rtx mem, ret;
5579 enum memmodel model;
5580
5581 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5582 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5583 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5584
5585 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5586 {
5587 warning (OPT_Winvalid_memory_model,
5588 "invalid memory model for %<__atomic_clear%>");
5589 model = MEMMODEL_SEQ_CST;
5590 }
5591
5592 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5593 Failing that, emit a plain store ourselves. The only way this can
5594 fail is if the bool type is larger than a word size. Unlikely, but
5595 handle it anyway for completeness. Assume a single threaded model since
5596 there is no atomic support in this case, and no barriers are required. */
5597 ret = expand_atomic_store (mem, const0_rtx, model, true);
5598 if (!ret)
5599 emit_move_insn (mem, const0_rtx);
5600 return const0_rtx;
5601 }
5602
5603 /* Expand an atomic test_and_set operation.
5604 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5605 EXP is the call expression. */
5606
5607 static rtx
5608 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5609 {
5610 rtx mem;
5611 enum memmodel model;
5612 machine_mode mode;
5613
5614 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5615 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5616 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5617
5618 return expand_atomic_test_and_set (target, mem, model);
5619 }
5620
5621
5622 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5623 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5624
5625 static tree
5626 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5627 {
5628 int size;
5629 machine_mode mode;
5630 unsigned int mode_align, type_align;
5631
5632 if (TREE_CODE (arg0) != INTEGER_CST)
5633 return NULL_TREE;
5634
5635 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5636 mode = mode_for_size (size, MODE_INT, 0);
5637 mode_align = GET_MODE_ALIGNMENT (mode);
5638
5639 if (TREE_CODE (arg1) == INTEGER_CST)
5640 {
5641 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5642
5643 /* Either this argument is null, or it's a fake pointer encoding
5644 the alignment of the object. */
5645 val = val & -val;
5646 val *= BITS_PER_UNIT;
5647
5648 if (val == 0 || mode_align < val)
5649 type_align = mode_align;
5650 else
5651 type_align = val;
5652 }
5653 else
5654 {
5655 tree ttype = TREE_TYPE (arg1);
5656
5657 /* This function is usually invoked and folded immediately by the front
5658 end before anything else has a chance to look at it. The pointer
5659 parameter at this point is usually cast to a void *, so check for that
5660 and look past the cast. */
5661 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5662 && VOID_TYPE_P (TREE_TYPE (ttype)))
5663 arg1 = TREE_OPERAND (arg1, 0);
5664
5665 ttype = TREE_TYPE (arg1);
5666 gcc_assert (POINTER_TYPE_P (ttype));
5667
5668 /* Get the underlying type of the object. */
5669 ttype = TREE_TYPE (ttype);
5670 type_align = TYPE_ALIGN (ttype);
5671 }
5672
5673 /* If the object has smaller alignment, the lock free routines cannot
5674 be used. */
5675 if (type_align < mode_align)
5676 return boolean_false_node;
5677
5678 /* Check if a compare_and_swap pattern exists for the mode which represents
5679 the required size. The pattern is not allowed to fail, so the existence
5680 of the pattern indicates support is present. */
5681 if (can_compare_and_swap_p (mode, true))
5682 return boolean_true_node;
5683 else
5684 return boolean_false_node;
5685 }
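/* Illustrative folding: on a target with a 32-bit compare-and-swap
   pattern, __atomic_always_lock_free (4, 0) folds to true here (null
   object pointer, so typical alignment is assumed), whereas a pointer to
   an under-aligned object, e.g. a member of a packed struct, makes the
   alignment check above fold the call to false.  */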
5686
5687 /* Return true if the parameters to call EXP represent an object which will
5688 always generate lock free instructions. The first argument represents the
5689 size of the object, and the second parameter is a pointer to the object
5690 itself. If NULL is passed for the object, then the result is based on
5691 typical alignment for an object of the specified size. Otherwise return
5692 false. */
5693
5694 static rtx
5695 expand_builtin_atomic_always_lock_free (tree exp)
5696 {
5697 tree size;
5698 tree arg0 = CALL_EXPR_ARG (exp, 0);
5699 tree arg1 = CALL_EXPR_ARG (exp, 1);
5700
5701 if (TREE_CODE (arg0) != INTEGER_CST)
5702 {
5703 error ("non-constant argument 1 to __atomic_always_lock_free");
5704 return const0_rtx;
5705 }
5706
5707 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5708 if (size == boolean_true_node)
5709 return const1_rtx;
5710 return const0_rtx;
5711 }
5712
5713 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5714 is lock free on this architecture. */
5715
5716 static tree
5717 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5718 {
5719 if (!flag_inline_atomics)
5720 return NULL_TREE;
5721
5722 /* If it isn't always lock free, don't generate a result. */
5723 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5724 return boolean_true_node;
5725
5726 return NULL_TREE;
5727 }
5728
5729 /* Return true if the parameters to call EXP represent an object which is
5730 always lock free on this target. The first argument represents the
5731 size of the object, and the second parameter is a pointer to the object
5732 itself. If NULL is passed for the object, then the result is based on
5733 typical alignment for an object of the specified size. Otherwise return
5734 NULL_RTX. */
5735
5736 static rtx
5737 expand_builtin_atomic_is_lock_free (tree exp)
5738 {
5739 tree size;
5740 tree arg0 = CALL_EXPR_ARG (exp, 0);
5741 tree arg1 = CALL_EXPR_ARG (exp, 1);
5742
5743 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5744 {
5745 error ("non-integer argument 1 to __atomic_is_lock_free");
5746 return NULL_RTX;
5747 }
5748
5749 if (!flag_inline_atomics)
5750 return NULL_RTX;
5751
5752 /* If the value is known at compile time, return the RTX for it. */
5753 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5754 if (size == boolean_true_node)
5755 return const1_rtx;
5756
5757 return NULL_RTX;
5758 }
5759
5760 /* Expand the __atomic_thread_fence intrinsic:
5761 void __atomic_thread_fence (enum memmodel)
5762 EXP is the CALL_EXPR. */
5763
5764 static void
5765 expand_builtin_atomic_thread_fence (tree exp)
5766 {
5767 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5768 expand_mem_thread_fence (model);
5769 }
5770
5771 /* Expand the __atomic_signal_fence intrinsic:
5772 void __atomic_signal_fence (enum memmodel)
5773 EXP is the CALL_EXPR. */
5774
5775 static void
5776 expand_builtin_atomic_signal_fence (tree exp)
5777 {
5778 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5779 expand_mem_signal_fence (model);
5780 }
5781
5782 /* Expand the __sync_synchronize intrinsic. */
5783
5784 static void
5785 expand_builtin_sync_synchronize (void)
5786 {
5787 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5788 }
5789
5790 static rtx
5791 expand_builtin_thread_pointer (tree exp, rtx target)
5792 {
5793 enum insn_code icode;
5794 if (!validate_arglist (exp, VOID_TYPE))
5795 return const0_rtx;
5796 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5797 if (icode != CODE_FOR_nothing)
5798 {
5799 struct expand_operand op;
5800 /* If the target is not suitable then create a new target. */
5801 if (target == NULL_RTX
5802 || !REG_P (target)
5803 || GET_MODE (target) != Pmode)
5804 target = gen_reg_rtx (Pmode);
5805 create_output_operand (&op, target, Pmode);
5806 expand_insn (icode, 1, &op);
5807 return target;
5808 }
5809 error ("__builtin_thread_pointer is not supported on this target");
5810 return const0_rtx;
5811 }
5812
5813 static void
5814 expand_builtin_set_thread_pointer (tree exp)
5815 {
5816 enum insn_code icode;
5817 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5818 return;
5819 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5820 if (icode != CODE_FOR_nothing)
5821 {
5822 struct expand_operand op;
5823 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5824 Pmode, EXPAND_NORMAL);
5825 create_input_operand (&op, val, Pmode);
5826 expand_insn (icode, 1, &op);
5827 return;
5828 }
5829 error ("__builtin_set_thread_pointer is not supported on this target");
5830 }
5831
5832 \f
5833 /* Emit code to restore the current value of the stack. */
5834
5835 static void
5836 expand_stack_restore (tree var)
5837 {
5838 rtx_insn *prev;
5839 rtx sa = expand_normal (var);
5840
5841 sa = convert_memory_address (Pmode, sa);
5842
5843 prev = get_last_insn ();
5844 emit_stack_restore (SAVE_BLOCK, sa);
5845
5846 record_new_stack_level ();
5847
5848 fixup_args_size_notes (prev, get_last_insn (), 0);
5849 }
5850
5851 /* Emit code to save the current value of the stack. */
5852
5853 static rtx
5854 expand_stack_save (void)
5855 {
5856 rtx ret = NULL_RTX;
5857
5858 emit_stack_save (SAVE_BLOCK, &ret);
5859 return ret;
5860 }
5861
5862
5863 /* Expand an expression EXP that calls a built-in function,
5864 with result going to TARGET if that's convenient
5865 (and in mode MODE if that's convenient).
5866 SUBTARGET may be used as the target for computing one of EXP's operands.
5867 IGNORE is nonzero if the value is to be ignored. */
5868
5869 rtx
5870 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5871 int ignore)
5872 {
5873 tree fndecl = get_callee_fndecl (exp);
5874 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5875 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5876 int flags;
5877
5878 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5879 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5880
5881 /* When ASan is enabled, we don't want to expand some memory/string
5882 builtins and rely on libsanitizer's hooks. This allows us to avoid
5883 redundant checks and be sure that a possible overflow will be detected
5884 by ASan. */
5885
5886 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5887 return expand_call (exp, target, ignore);
5888
5889 /* When not optimizing, generate calls to library functions for a certain
5890 set of builtins. */
5891 if (!optimize
5892 && !called_as_built_in (fndecl)
5893 && fcode != BUILT_IN_FORK
5894 && fcode != BUILT_IN_EXECL
5895 && fcode != BUILT_IN_EXECV
5896 && fcode != BUILT_IN_EXECLP
5897 && fcode != BUILT_IN_EXECLE
5898 && fcode != BUILT_IN_EXECVP
5899 && fcode != BUILT_IN_EXECVE
5900 && fcode != BUILT_IN_ALLOCA
5901 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5902 && fcode != BUILT_IN_FREE
5903 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5904 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5905 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5906 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5907 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5908 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5909 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5910 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5911 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5912 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5913 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5914 && fcode != BUILT_IN_CHKP_BNDRET)
5915 return expand_call (exp, target, ignore);
5916
5917 /* The built-in function expanders test for target == const0_rtx
5918 to determine whether the function's result will be ignored. */
5919 if (ignore)
5920 target = const0_rtx;
5921
5922 /* If the result of a pure or const built-in function is ignored, and
5923 none of its arguments are volatile, we can avoid expanding the
5924 built-in call and just evaluate the arguments for side-effects. */
5925 if (target == const0_rtx
5926 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5927 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5928 {
5929 bool volatilep = false;
5930 tree arg;
5931 call_expr_arg_iterator iter;
5932
5933 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5934 if (TREE_THIS_VOLATILE (arg))
5935 {
5936 volatilep = true;
5937 break;
5938 }
5939
5940 if (! volatilep)
5941 {
5942 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5943 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5944 return const0_rtx;
5945 }
5946 }
5947
5948 /* expand_builtin_with_bounds is supposed to be used for
5949 instrumented builtin calls. */
5950 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5951
5952 switch (fcode)
5953 {
5954 CASE_FLT_FN (BUILT_IN_FABS):
5955 case BUILT_IN_FABSD32:
5956 case BUILT_IN_FABSD64:
5957 case BUILT_IN_FABSD128:
5958 target = expand_builtin_fabs (exp, target, subtarget);
5959 if (target)
5960 return target;
5961 break;
5962
5963 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5964 target = expand_builtin_copysign (exp, target, subtarget);
5965 if (target)
5966 return target;
5967 break;
5968
5969 /* Just do a normal library call if we were unable to fold
5970 the values. */
5971 CASE_FLT_FN (BUILT_IN_CABS):
5972 break;
5973
5974 CASE_FLT_FN (BUILT_IN_EXP):
5975 CASE_FLT_FN (BUILT_IN_EXP10):
5976 CASE_FLT_FN (BUILT_IN_POW10):
5977 CASE_FLT_FN (BUILT_IN_EXP2):
5978 CASE_FLT_FN (BUILT_IN_EXPM1):
5979 CASE_FLT_FN (BUILT_IN_LOGB):
5980 CASE_FLT_FN (BUILT_IN_LOG):
5981 CASE_FLT_FN (BUILT_IN_LOG10):
5982 CASE_FLT_FN (BUILT_IN_LOG2):
5983 CASE_FLT_FN (BUILT_IN_LOG1P):
5984 CASE_FLT_FN (BUILT_IN_TAN):
5985 CASE_FLT_FN (BUILT_IN_ASIN):
5986 CASE_FLT_FN (BUILT_IN_ACOS):
5987 CASE_FLT_FN (BUILT_IN_ATAN):
5988 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5989 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5990 because of possible accuracy problems. */
5991 if (! flag_unsafe_math_optimizations)
5992 break;
5993 CASE_FLT_FN (BUILT_IN_SQRT):
5994 CASE_FLT_FN (BUILT_IN_FLOOR):
5995 CASE_FLT_FN (BUILT_IN_CEIL):
5996 CASE_FLT_FN (BUILT_IN_TRUNC):
5997 CASE_FLT_FN (BUILT_IN_ROUND):
5998 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5999 CASE_FLT_FN (BUILT_IN_RINT):
6000 target = expand_builtin_mathfn (exp, target, subtarget);
6001 if (target)
6002 return target;
6003 break;
6004
6005 CASE_FLT_FN (BUILT_IN_FMA):
6006 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6007 if (target)
6008 return target;
6009 break;
6010
6011 CASE_FLT_FN (BUILT_IN_ILOGB):
6012 if (! flag_unsafe_math_optimizations)
6013 break;
6014 CASE_FLT_FN (BUILT_IN_ISINF):
6015 CASE_FLT_FN (BUILT_IN_FINITE):
6016 case BUILT_IN_ISFINITE:
6017 case BUILT_IN_ISNORMAL:
6018 target = expand_builtin_interclass_mathfn (exp, target);
6019 if (target)
6020 return target;
6021 break;
6022
6023 CASE_FLT_FN (BUILT_IN_ICEIL):
6024 CASE_FLT_FN (BUILT_IN_LCEIL):
6025 CASE_FLT_FN (BUILT_IN_LLCEIL):
6026 CASE_FLT_FN (BUILT_IN_LFLOOR):
6027 CASE_FLT_FN (BUILT_IN_IFLOOR):
6028 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6029 target = expand_builtin_int_roundingfn (exp, target);
6030 if (target)
6031 return target;
6032 break;
6033
6034 CASE_FLT_FN (BUILT_IN_IRINT):
6035 CASE_FLT_FN (BUILT_IN_LRINT):
6036 CASE_FLT_FN (BUILT_IN_LLRINT):
6037 CASE_FLT_FN (BUILT_IN_IROUND):
6038 CASE_FLT_FN (BUILT_IN_LROUND):
6039 CASE_FLT_FN (BUILT_IN_LLROUND):
6040 target = expand_builtin_int_roundingfn_2 (exp, target);
6041 if (target)
6042 return target;
6043 break;
6044
6045 CASE_FLT_FN (BUILT_IN_POWI):
6046 target = expand_builtin_powi (exp, target);
6047 if (target)
6048 return target;
6049 break;
6050
6051 CASE_FLT_FN (BUILT_IN_ATAN2):
6052 CASE_FLT_FN (BUILT_IN_LDEXP):
6053 CASE_FLT_FN (BUILT_IN_SCALB):
6054 CASE_FLT_FN (BUILT_IN_SCALBN):
6055 CASE_FLT_FN (BUILT_IN_SCALBLN):
6056 if (! flag_unsafe_math_optimizations)
6057 break;
6058
6059 CASE_FLT_FN (BUILT_IN_FMOD):
6060 CASE_FLT_FN (BUILT_IN_REMAINDER):
6061 CASE_FLT_FN (BUILT_IN_DREM):
6062 CASE_FLT_FN (BUILT_IN_POW):
6063 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6064 if (target)
6065 return target;
6066 break;
6067
6068 CASE_FLT_FN (BUILT_IN_CEXPI):
6069 target = expand_builtin_cexpi (exp, target);
6070 gcc_assert (target);
6071 return target;
6072
6073 CASE_FLT_FN (BUILT_IN_SIN):
6074 CASE_FLT_FN (BUILT_IN_COS):
6075 if (! flag_unsafe_math_optimizations)
6076 break;
6077 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_FLT_FN (BUILT_IN_SINCOS):
6083 if (! flag_unsafe_math_optimizations)
6084 break;
6085 target = expand_builtin_sincos (exp);
6086 if (target)
6087 return target;
6088 break;
6089
6090 case BUILT_IN_APPLY_ARGS:
6091 return expand_builtin_apply_args ();
6092
6093 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6094 FUNCTION with a copy of the parameters described by
6095 ARGUMENTS, and ARGSIZE. It returns a block of memory
6096 allocated on the stack into which is stored all the registers
6097 that might possibly be used for returning the result of a
6098 function. ARGUMENTS is the value returned by
6099 __builtin_apply_args. ARGSIZE is the number of bytes of
6100 arguments that must be copied. ??? How should this value be
6101 computed? We'll also need a safe worst case value for varargs
6102 functions. */
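/* An illustrative (non-GCC) forwarding wrapper built from these
   builtins; TARGET_FN and the 64-byte argument size are assumptions:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);
*/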
6103 case BUILT_IN_APPLY:
6104 if (!validate_arglist (exp, POINTER_TYPE,
6105 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6106 && !validate_arglist (exp, REFERENCE_TYPE,
6107 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6108 return const0_rtx;
6109 else
6110 {
6111 rtx ops[3];
6112
6113 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6114 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6115 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6116
6117 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6118 }
6119
6120 /* __builtin_return (RESULT) causes the function to return the
6121 value described by RESULT. RESULT is address of the block of
6122 memory returned by __builtin_apply. */
6123 case BUILT_IN_RETURN:
6124 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6125 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6126 return const0_rtx;
6127
6128 case BUILT_IN_SAVEREGS:
6129 return expand_builtin_saveregs ();
6130
6131 case BUILT_IN_VA_ARG_PACK:
6132 /* All valid uses of __builtin_va_arg_pack () are removed during
6133 inlining. */
6134 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6135 return const0_rtx;
6136
6137 case BUILT_IN_VA_ARG_PACK_LEN:
6138 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6139 inlining. */
6140 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6141 return const0_rtx;
6142
6143 /* Return the address of the first anonymous stack arg. */
6144 case BUILT_IN_NEXT_ARG:
6145 if (fold_builtin_next_arg (exp, false))
6146 return const0_rtx;
6147 return expand_builtin_next_arg ();
6148
6149 case BUILT_IN_CLEAR_CACHE:
6150 target = expand_builtin___clear_cache (exp);
6151 if (target)
6152 return target;
6153 break;
6154
6155 case BUILT_IN_CLASSIFY_TYPE:
6156 return expand_builtin_classify_type (exp);
6157
6158 case BUILT_IN_CONSTANT_P:
6159 return const0_rtx;
6160
6161 case BUILT_IN_FRAME_ADDRESS:
6162 case BUILT_IN_RETURN_ADDRESS:
6163 return expand_builtin_frame_address (fndecl, exp);
6164
6165 /* Returns the address of the area where the structure is returned.
6166 0 otherwise. */
6167 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6168 if (call_expr_nargs (exp) != 0
6169 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6170 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6171 return const0_rtx;
6172 else
6173 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6174
6175 case BUILT_IN_ALLOCA:
6176 case BUILT_IN_ALLOCA_WITH_ALIGN:
6177 /* If the allocation stems from the declaration of a variable-sized
6178 object, it cannot accumulate. */
6179 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6180 if (target)
6181 return target;
6182 break;
6183
6184 case BUILT_IN_STACK_SAVE:
6185 return expand_stack_save ();
6186
6187 case BUILT_IN_STACK_RESTORE:
6188 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6189 return const0_rtx;
6190
6191 case BUILT_IN_BSWAP16:
6192 case BUILT_IN_BSWAP32:
6193 case BUILT_IN_BSWAP64:
6194 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6195 if (target)
6196 return target;
6197 break;
6198
6199 CASE_INT_FN (BUILT_IN_FFS):
6200 target = expand_builtin_unop (target_mode, exp, target,
6201 subtarget, ffs_optab);
6202 if (target)
6203 return target;
6204 break;
6205
6206 CASE_INT_FN (BUILT_IN_CLZ):
6207 target = expand_builtin_unop (target_mode, exp, target,
6208 subtarget, clz_optab);
6209 if (target)
6210 return target;
6211 break;
6212
6213 CASE_INT_FN (BUILT_IN_CTZ):
6214 target = expand_builtin_unop (target_mode, exp, target,
6215 subtarget, ctz_optab);
6216 if (target)
6217 return target;
6218 break;
6219
6220 CASE_INT_FN (BUILT_IN_CLRSB):
6221 target = expand_builtin_unop (target_mode, exp, target,
6222 subtarget, clrsb_optab);
6223 if (target)
6224 return target;
6225 break;
6226
6227 CASE_INT_FN (BUILT_IN_POPCOUNT):
6228 target = expand_builtin_unop (target_mode, exp, target,
6229 subtarget, popcount_optab);
6230 if (target)
6231 return target;
6232 break;
6233
6234 CASE_INT_FN (BUILT_IN_PARITY):
6235 target = expand_builtin_unop (target_mode, exp, target,
6236 subtarget, parity_optab);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_STRLEN:
6242 target = expand_builtin_strlen (exp, target, target_mode);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_STRCPY:
6248 target = expand_builtin_strcpy (exp, target);
6249 if (target)
6250 return target;
6251 break;
6252
6253 case BUILT_IN_STRNCPY:
6254 target = expand_builtin_strncpy (exp, target);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_STPCPY:
6260 target = expand_builtin_stpcpy (exp, target, mode);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_MEMCPY:
6266 target = expand_builtin_memcpy (exp, target);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_MEMPCPY:
6272 target = expand_builtin_mempcpy (exp, target, mode);
6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_MEMSET:
6278 target = expand_builtin_memset (exp, target, mode);
6279 if (target)
6280 return target;
6281 break;
6282
6283 case BUILT_IN_BZERO:
6284 target = expand_builtin_bzero (exp);
6285 if (target)
6286 return target;
6287 break;
6288
6289 case BUILT_IN_STRCMP:
6290 target = expand_builtin_strcmp (exp, target);
6291 if (target)
6292 return target;
6293 break;
6294
6295 case BUILT_IN_STRNCMP:
6296 target = expand_builtin_strncmp (exp, target, mode);
6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_BCMP:
6302 case BUILT_IN_MEMCMP:
6303 target = expand_builtin_memcmp (exp, target);
6304 if (target)
6305 return target;
6306 break;
6307
6308 case BUILT_IN_SETJMP:
6309 /* This should have been lowered to the builtins below. */
6310 gcc_unreachable ();
6311
6312 case BUILT_IN_SETJMP_SETUP:
6313 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6314 and the receiver label. */
6315 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6316 {
6317 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6318 VOIDmode, EXPAND_NORMAL);
6319 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6320 rtx_insn *label_r = label_rtx (label);
6321
6322 /* This is copied from the handling of non-local gotos. */
6323 expand_builtin_setjmp_setup (buf_addr, label_r);
6324 nonlocal_goto_handler_labels
6325 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6326 nonlocal_goto_handler_labels);
6327 /* ??? Do not let expand_label treat us as such since we would
6328 not want to be both on the list of non-local labels and on
6329 the list of forced labels. */
6330 FORCED_LABEL (label) = 0;
6331 return const0_rtx;
6332 }
6333 break;
6334
6335 case BUILT_IN_SETJMP_RECEIVER:
6336 /* __builtin_setjmp_receiver is passed the receiver label. */
6337 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6338 {
6339 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6340 rtx_insn *label_r = label_rtx (label);
6341
6342 expand_builtin_setjmp_receiver (label_r);
6343 return const0_rtx;
6344 }
6345 break;
6346
6347 /* __builtin_longjmp is passed a pointer to an array of five words.
6348 It's similar to the C library longjmp function but works with
6349 __builtin_setjmp above. */
6350 case BUILT_IN_LONGJMP:
6351 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6352 {
6353 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6354 VOIDmode, EXPAND_NORMAL);
6355 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6356
6357 if (value != const1_rtx)
6358 {
6359 error ("%<__builtin_longjmp%> second argument must be 1");
6360 return const0_rtx;
6361 }
6362
6363 expand_builtin_longjmp (buf_addr, value);
6364 return const0_rtx;
6365 }
6366 break;
6367
6368 case BUILT_IN_NONLOCAL_GOTO:
6369 target = expand_builtin_nonlocal_goto (exp);
6370 if (target)
6371 return target;
6372 break;
6373
6374 /* This updates the setjmp buffer that is its argument with the value
6375 of the current stack pointer. */
6376 case BUILT_IN_UPDATE_SETJMP_BUF:
6377 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6378 {
6379 rtx buf_addr
6380 = expand_normal (CALL_EXPR_ARG (exp, 0));
6381
6382 expand_builtin_update_setjmp_buf (buf_addr);
6383 return const0_rtx;
6384 }
6385 break;
6386
6387 case BUILT_IN_TRAP:
6388 expand_builtin_trap ();
6389 return const0_rtx;
6390
6391 case BUILT_IN_UNREACHABLE:
6392 expand_builtin_unreachable ();
6393 return const0_rtx;
6394
6395 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6396 case BUILT_IN_SIGNBITD32:
6397 case BUILT_IN_SIGNBITD64:
6398 case BUILT_IN_SIGNBITD128:
6399 target = expand_builtin_signbit (exp, target);
6400 if (target)
6401 return target;
6402 break;
6403
6404 /* Various hooks for the DWARF 2 __throw routine. */
6405 case BUILT_IN_UNWIND_INIT:
6406 expand_builtin_unwind_init ();
6407 return const0_rtx;
6408 case BUILT_IN_DWARF_CFA:
6409 return virtual_cfa_rtx;
6410 #ifdef DWARF2_UNWIND_INFO
6411 case BUILT_IN_DWARF_SP_COLUMN:
6412 return expand_builtin_dwarf_sp_column ();
6413 case BUILT_IN_INIT_DWARF_REG_SIZES:
6414 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6415 return const0_rtx;
6416 #endif
6417 case BUILT_IN_FROB_RETURN_ADDR:
6418 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6419 case BUILT_IN_EXTRACT_RETURN_ADDR:
6420 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6421 case BUILT_IN_EH_RETURN:
6422 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6423 CALL_EXPR_ARG (exp, 1));
6424 return const0_rtx;
6425 case BUILT_IN_EH_RETURN_DATA_REGNO:
6426 return expand_builtin_eh_return_data_regno (exp);
6427 case BUILT_IN_EXTEND_POINTER:
6428 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6429 case BUILT_IN_EH_POINTER:
6430 return expand_builtin_eh_pointer (exp);
6431 case BUILT_IN_EH_FILTER:
6432 return expand_builtin_eh_filter (exp);
6433 case BUILT_IN_EH_COPY_VALUES:
6434 return expand_builtin_eh_copy_values (exp);
6435
6436 case BUILT_IN_VA_START:
6437 return expand_builtin_va_start (exp);
6438 case BUILT_IN_VA_END:
6439 return expand_builtin_va_end (exp);
6440 case BUILT_IN_VA_COPY:
6441 return expand_builtin_va_copy (exp);
6442 case BUILT_IN_EXPECT:
6443 return expand_builtin_expect (exp, target);
6444 case BUILT_IN_ASSUME_ALIGNED:
6445 return expand_builtin_assume_aligned (exp, target);
6446 case BUILT_IN_PREFETCH:
6447 expand_builtin_prefetch (exp);
6448 return const0_rtx;
6449
6450 case BUILT_IN_INIT_TRAMPOLINE:
6451 return expand_builtin_init_trampoline (exp, true);
6452 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6453 return expand_builtin_init_trampoline (exp, false);
6454 case BUILT_IN_ADJUST_TRAMPOLINE:
6455 return expand_builtin_adjust_trampoline (exp);
6456
6457 case BUILT_IN_FORK:
6458 case BUILT_IN_EXECL:
6459 case BUILT_IN_EXECV:
6460 case BUILT_IN_EXECLP:
6461 case BUILT_IN_EXECLE:
6462 case BUILT_IN_EXECVP:
6463 case BUILT_IN_EXECVE:
6464 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6470 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6475 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6481 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6486 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6492 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6493 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6497 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6503 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6504 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6508 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6509 if (target)
6510 return target;
6511 break;
6512
6513 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6514 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6519 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6520 if (target)
6521 return target;
6522 break;
6523
6524 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6525 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6529 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6530 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6531 if (target)
6532 return target;
6533 break;
6534
6535 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6536 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6541 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6542 if (target)
6543 return target;
6544 break;
6545
6546 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6547 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6551 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6552 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6553 if (target)
6554 return target;
6555 break;
6556
6557 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6558 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6559 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6563 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6564 if (target)
6565 return target;
6566 break;
6567
6568 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6569 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6570 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6573 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6574 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6575 if (target)
6576 return target;
6577 break;
6578
6579 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6580 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6584 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6585 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6586 if (target)
6587 return target;
6588 break;
6589
6590 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6591 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6596 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6602 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6606 if (mode == VOIDmode)
6607 mode = TYPE_MODE (boolean_type_node);
6608 if (!target || !register_operand (target, mode))
6609 target = gen_reg_rtx (mode);
6610
6611 mode = get_builtin_sync_mode
6612 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6613 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6614 if (target)
6615 return target;
6616 break;
6617
6618 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6619 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6623 mode = get_builtin_sync_mode
6624 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6625 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6631 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6636 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6637 if (target)
6638 return target;
6639 break;
6640
6641 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6642 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6643 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6646 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6647 expand_builtin_sync_lock_release (mode, exp);
6648 return const0_rtx;
6649
6650 case BUILT_IN_SYNC_SYNCHRONIZE:
6651 expand_builtin_sync_synchronize ();
6652 return const0_rtx;
6653
6654 case BUILT_IN_ATOMIC_EXCHANGE_1:
6655 case BUILT_IN_ATOMIC_EXCHANGE_2:
6656 case BUILT_IN_ATOMIC_EXCHANGE_4:
6657 case BUILT_IN_ATOMIC_EXCHANGE_8:
6658 case BUILT_IN_ATOMIC_EXCHANGE_16:
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6660 target = expand_builtin_atomic_exchange (mode, exp, target);
6661 if (target)
6662 return target;
6663 break;
6664
6665 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6666 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6667 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6670 {
6671 unsigned int nargs, z;
6672 vec<tree, va_gc> *vec;
6673
6674 mode =
6675 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6676 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6677 if (target)
6678 return target;
6679
6680 /* If this is turned into an external library call, the weak parameter
6681 must be dropped to match the expected parameter list. */
6682 nargs = call_expr_nargs (exp);
6683 vec_alloc (vec, nargs - 1);
6684 for (z = 0; z < 3; z++)
6685 vec->quick_push (CALL_EXPR_ARG (exp, z));
6686 /* Skip the boolean weak parameter. */
6687 for (z = 4; z < 6; z++)
6688 vec->quick_push (CALL_EXPR_ARG (exp, z));
6689 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6690 break;
6691 }
6692
6693 case BUILT_IN_ATOMIC_LOAD_1:
6694 case BUILT_IN_ATOMIC_LOAD_2:
6695 case BUILT_IN_ATOMIC_LOAD_4:
6696 case BUILT_IN_ATOMIC_LOAD_8:
6697 case BUILT_IN_ATOMIC_LOAD_16:
6698 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6699 target = expand_builtin_atomic_load (mode, exp, target);
6700 if (target)
6701 return target;
6702 break;
6703
6704 case BUILT_IN_ATOMIC_STORE_1:
6705 case BUILT_IN_ATOMIC_STORE_2:
6706 case BUILT_IN_ATOMIC_STORE_4:
6707 case BUILT_IN_ATOMIC_STORE_8:
6708 case BUILT_IN_ATOMIC_STORE_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6710 target = expand_builtin_atomic_store (mode, exp);
6711 if (target)
6712 return const0_rtx;
6713 break;
6714
6715 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6716 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6717 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6718 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6720 {
6721 enum built_in_function lib;
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6723 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6724 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6726 ignore, lib);
6727 if (target)
6728 return target;
6729 break;
6730 }
6731 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6732 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6733 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6736 {
6737 enum built_in_function lib;
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6739 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6740 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6741 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6742 ignore, lib);
6743 if (target)
6744 return target;
6745 break;
6746 }
6747 case BUILT_IN_ATOMIC_AND_FETCH_1:
6748 case BUILT_IN_ATOMIC_AND_FETCH_2:
6749 case BUILT_IN_ATOMIC_AND_FETCH_4:
6750 case BUILT_IN_ATOMIC_AND_FETCH_8:
6751 case BUILT_IN_ATOMIC_AND_FETCH_16:
6752 {
6753 enum built_in_function lib;
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6755 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6756 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6757 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6758 ignore, lib);
6759 if (target)
6760 return target;
6761 break;
6762 }
6763 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6764 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6765 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6768 {
6769 enum built_in_function lib;
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6771 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6772 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6773 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6774 ignore, lib);
6775 if (target)
6776 return target;
6777 break;
6778 }
6779 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6780 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6781 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6784 {
6785 enum built_in_function lib;
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6787 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6788 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6790 ignore, lib);
6791 if (target)
6792 return target;
6793 break;
6794 }
6795 case BUILT_IN_ATOMIC_OR_FETCH_1:
6796 case BUILT_IN_ATOMIC_OR_FETCH_2:
6797 case BUILT_IN_ATOMIC_OR_FETCH_4:
6798 case BUILT_IN_ATOMIC_OR_FETCH_8:
6799 case BUILT_IN_ATOMIC_OR_FETCH_16:
6800 {
6801 enum built_in_function lib;
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6803 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6804 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6805 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6806 ignore, lib);
6807 if (target)
6808 return target;
6809 break;
6810 }
6811 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6812 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6813 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6817 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6818 ignore, BUILT_IN_NONE);
6819 if (target)
6820 return target;
6821 break;
6822
6823 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6824 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6825 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6829 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6830 ignore, BUILT_IN_NONE);
6831 if (target)
6832 return target;
6833 break;
6834
6835 case BUILT_IN_ATOMIC_FETCH_AND_1:
6836 case BUILT_IN_ATOMIC_FETCH_AND_2:
6837 case BUILT_IN_ATOMIC_FETCH_AND_4:
6838 case BUILT_IN_ATOMIC_FETCH_AND_8:
6839 case BUILT_IN_ATOMIC_FETCH_AND_16:
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6841 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6842 ignore, BUILT_IN_NONE);
6843 if (target)
6844 return target;
6845 break;
6846
6847 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6848 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6849 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6852 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6853 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6854 ignore, BUILT_IN_NONE);
6855 if (target)
6856 return target;
6857 break;
6858
6859 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6860 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6861 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6865 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6866 ignore, BUILT_IN_NONE);
6867 if (target)
6868 return target;
6869 break;
6870
6871 case BUILT_IN_ATOMIC_FETCH_OR_1:
6872 case BUILT_IN_ATOMIC_FETCH_OR_2:
6873 case BUILT_IN_ATOMIC_FETCH_OR_4:
6874 case BUILT_IN_ATOMIC_FETCH_OR_8:
6875 case BUILT_IN_ATOMIC_FETCH_OR_16:
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6877 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6878 ignore, BUILT_IN_NONE);
6879 if (target)
6880 return target;
6881 break;
6882
6883 case BUILT_IN_ATOMIC_TEST_AND_SET:
6884 return expand_builtin_atomic_test_and_set (exp, target);
6885
6886 case BUILT_IN_ATOMIC_CLEAR:
6887 return expand_builtin_atomic_clear (exp);
6888
6889 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6890 return expand_builtin_atomic_always_lock_free (exp);
6891
6892 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6893 target = expand_builtin_atomic_is_lock_free (exp);
6894 if (target)
6895 return target;
6896 break;
6897
6898 case BUILT_IN_ATOMIC_THREAD_FENCE:
6899 expand_builtin_atomic_thread_fence (exp);
6900 return const0_rtx;
6901
6902 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6903 expand_builtin_atomic_signal_fence (exp);
6904 return const0_rtx;
6905
6906 case BUILT_IN_OBJECT_SIZE:
6907 return expand_builtin_object_size (exp);
6908
6909 case BUILT_IN_MEMCPY_CHK:
6910 case BUILT_IN_MEMPCPY_CHK:
6911 case BUILT_IN_MEMMOVE_CHK:
6912 case BUILT_IN_MEMSET_CHK:
6913 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6914 if (target)
6915 return target;
6916 break;
6917
6918 case BUILT_IN_STRCPY_CHK:
6919 case BUILT_IN_STPCPY_CHK:
6920 case BUILT_IN_STRNCPY_CHK:
6921 case BUILT_IN_STPNCPY_CHK:
6922 case BUILT_IN_STRCAT_CHK:
6923 case BUILT_IN_STRNCAT_CHK:
6924 case BUILT_IN_SNPRINTF_CHK:
6925 case BUILT_IN_VSNPRINTF_CHK:
6926 maybe_emit_chk_warning (exp, fcode);
6927 break;
6928
6929 case BUILT_IN_SPRINTF_CHK:
6930 case BUILT_IN_VSPRINTF_CHK:
6931 maybe_emit_sprintf_chk_warning (exp, fcode);
6932 break;
6933
6934 case BUILT_IN_FREE:
6935 if (warn_free_nonheap_object)
6936 maybe_emit_free_warning (exp);
6937 break;
6938
6939 case BUILT_IN_THREAD_POINTER:
6940 return expand_builtin_thread_pointer (exp, target);
6941
6942 case BUILT_IN_SET_THREAD_POINTER:
6943 expand_builtin_set_thread_pointer (exp);
6944 return const0_rtx;
6945
6946 case BUILT_IN_CILK_DETACH:
6947 expand_builtin_cilk_detach (exp);
6948 return const0_rtx;
6949
6950 case BUILT_IN_CILK_POP_FRAME:
6951 expand_builtin_cilk_pop_frame (exp);
6952 return const0_rtx;
6953
6954 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6955 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6956 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6957 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6958 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6959 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6960 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6961 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6962 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6963 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6964 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6965 /* User CHKP builtins are allowed even when Pointer Bounds
6966 Checker is off; expand them to trivial values in that case. */
6967 if (!chkp_function_instrumented_p (current_function_decl))
6968 {
6969 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6970 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6971 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6972 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6973 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6974 return expand_normal (CALL_EXPR_ARG (exp, 0));
6975 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6976 return expand_normal (size_zero_node);
6977 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6978 return expand_normal (size_int (-1));
6979 else
6980 return const0_rtx;
6981 }
6982 /* FALLTHROUGH */
6983
6984 case BUILT_IN_CHKP_BNDMK:
6985 case BUILT_IN_CHKP_BNDSTX:
6986 case BUILT_IN_CHKP_BNDCL:
6987 case BUILT_IN_CHKP_BNDCU:
6988 case BUILT_IN_CHKP_BNDLDX:
6989 case BUILT_IN_CHKP_BNDRET:
6990 case BUILT_IN_CHKP_INTERSECT:
6991 case BUILT_IN_CHKP_NARROW:
6992 case BUILT_IN_CHKP_EXTRACT_LOWER:
6993 case BUILT_IN_CHKP_EXTRACT_UPPER:
6994 /* A software implementation of Pointer Bounds Checker is not yet
6995 implemented; target support is required. */
6996 error ("Your target platform does not support -fcheck-pointer-bounds");
6997 break;
6998
6999 case BUILT_IN_ACC_ON_DEVICE:
7000 /* Do the library call if we failed to expand the builtin when
7001 folding. */
7002 break;
7003
7004 default: /* Just do a library call for an unknown builtin. */
7005 break;
7006 }
7007
7008 /* The switch statement above can drop through to cause the function
7009 to be called normally. */
7010 return expand_call (exp, target, ignore);
7011 }
7012
7013 /* Similar to expand_builtin but is used for instrumented calls. */
7014
7015 rtx
7016 expand_builtin_with_bounds (tree exp, rtx target,
7017 rtx subtarget ATTRIBUTE_UNUSED,
7018 machine_mode mode, int ignore)
7019 {
7020 tree fndecl = get_callee_fndecl (exp);
7021 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7022
7023 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7024
7025 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7026 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7027
7028 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7029 && fcode < END_CHKP_BUILTINS);
7030
7031 switch (fcode)
7032 {
7033 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7034 target = expand_builtin_memcpy_with_bounds (exp, target);
7035 if (target)
7036 return target;
7037 break;
7038
7039 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7040 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7041 if (target)
7042 return target;
7043 break;
7044
7045 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7046 target = expand_builtin_memset_with_bounds (exp, target, mode);
7047 if (target)
7048 return target;
7049 break;
7050
7051 default:
7052 break;
7053 }
7054
7055 /* The switch statement above can drop through to cause the function
7056 to be called normally. */
7057 return expand_call (exp, target, ignore);
7058 }
7059
7060 /* Determine whether a tree node represents a call to a built-in
7061 function. If the tree T is a call to a built-in function with
7062 the right number of arguments of the appropriate types, return
7063 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7064 Otherwise the return value is END_BUILTINS. */
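/* For example, a well-formed call to sqrt with a double argument maps
   to BUILT_IN_SQRT and a call to sqrtf with a float argument to
   BUILT_IN_SQRTF, while sqrt called with a non-float argument yields
   END_BUILTINS (informal examples).  */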
7065
7066 enum built_in_function
7067 builtin_mathfn_code (const_tree t)
7068 {
7069 const_tree fndecl, arg, parmlist;
7070 const_tree argtype, parmtype;
7071 const_call_expr_arg_iterator iter;
7072
7073 if (TREE_CODE (t) != CALL_EXPR
7074 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7075 return END_BUILTINS;
7076
7077 fndecl = get_callee_fndecl (t);
7078 if (fndecl == NULL_TREE
7079 || TREE_CODE (fndecl) != FUNCTION_DECL
7080 || ! DECL_BUILT_IN (fndecl)
7081 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7082 return END_BUILTINS;
7083
7084 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7085 init_const_call_expr_arg_iterator (t, &iter);
7086 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7087 {
7088 /* If a function doesn't take a variable number of arguments,
7089 the last element in the list will have type `void'. */
7090 parmtype = TREE_VALUE (parmlist);
7091 if (VOID_TYPE_P (parmtype))
7092 {
7093 if (more_const_call_expr_args_p (&iter))
7094 return END_BUILTINS;
7095 return DECL_FUNCTION_CODE (fndecl);
7096 }
7097
7098 if (! more_const_call_expr_args_p (&iter))
7099 return END_BUILTINS;
7100
7101 arg = next_const_call_expr_arg (&iter);
7102 argtype = TREE_TYPE (arg);
7103
7104 if (SCALAR_FLOAT_TYPE_P (parmtype))
7105 {
7106 if (! SCALAR_FLOAT_TYPE_P (argtype))
7107 return END_BUILTINS;
7108 }
7109 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7110 {
7111 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7112 return END_BUILTINS;
7113 }
7114 else if (POINTER_TYPE_P (parmtype))
7115 {
7116 if (! POINTER_TYPE_P (argtype))
7117 return END_BUILTINS;
7118 }
7119 else if (INTEGRAL_TYPE_P (parmtype))
7120 {
7121 if (! INTEGRAL_TYPE_P (argtype))
7122 return END_BUILTINS;
7123 }
7124 else
7125 return END_BUILTINS;
7126 }
7127
7128 /* Variable-length argument list. */
7129 return DECL_FUNCTION_CODE (fndecl);
7130 }
7131
7132 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7133 evaluate to a constant. */
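/* Informal examples: __builtin_constant_p (42) folds to 1 below, as
   does __builtin_constant_p ("abc"); a pointer- or aggregate-typed
   argument that is not a literal folds to 0, since only literals of
   those types are treated as constant here.  */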
7134
7135 static tree
7136 fold_builtin_constant_p (tree arg)
7137 {
7138 /* We return 1 for a numeric type that's known to be a constant
7139 value at compile-time or for an aggregate type that's a
7140 literal constant. */
7141 STRIP_NOPS (arg);
7142
7143 /* If we know this is a constant, return the constant one. */
7144 if (CONSTANT_CLASS_P (arg)
7145 || (TREE_CODE (arg) == CONSTRUCTOR
7146 && TREE_CONSTANT (arg)))
7147 return integer_one_node;
7148 if (TREE_CODE (arg) == ADDR_EXPR)
7149 {
7150 tree op = TREE_OPERAND (arg, 0);
7151 if (TREE_CODE (op) == STRING_CST
7152 || (TREE_CODE (op) == ARRAY_REF
7153 && integer_zerop (TREE_OPERAND (op, 1))
7154 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7155 return integer_one_node;
7156 }
7157
7158 /* If this expression has side effects, show we don't know it to be a
7159 constant. Likewise if it's a pointer or aggregate type, since in
7160 those cases we only want literals, as those are only optimized
7161 when generating RTL, not later.
7162 And finally, if we are compiling an initializer, not code, we
7163 need to return a definite result now; there's not going to be any
7164 more optimization done. */
7165 if (TREE_SIDE_EFFECTS (arg)
7166 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7167 || POINTER_TYPE_P (TREE_TYPE (arg))
7168 || cfun == 0
7169 || folding_initializer
7170 || force_folding_builtin_constant_p)
7171 return integer_zero_node;
7172
7173 return NULL_TREE;
7174 }
7175
7176 /* Create a call to builtin_expect with PRED, EXPECTED and (optionally)
7177 PREDICTOR as its arguments, and return it as a truthvalue. */
7178
7179 static tree
7180 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7181 tree predictor)
7182 {
7183 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7184
7185 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7186 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7187 ret_type = TREE_TYPE (TREE_TYPE (fn));
7188 pred_type = TREE_VALUE (arg_types);
7189 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7190
7191 pred = fold_convert_loc (loc, pred_type, pred);
7192 expected = fold_convert_loc (loc, expected_type, expected);
7193 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7194 predictor);
7195
7196 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7197 build_int_cst (ret_type, 0));
7198 }
7199
7200 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7201 NULL_TREE if no simplification is possible. */
7202
7203 tree
7204 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7205 {
7206 tree inner, fndecl, inner_arg0;
7207 enum tree_code code;
7208
7209 /* Distribute the expected value over short-circuiting operators.
7210 See through the cast from truthvalue_type_node to long. */
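  /* A sketch of the distribution done below:
       __builtin_expect (a && b, 1)
     becomes the equivalent of
       __builtin_expect (a, 1) && __builtin_expect (b, 1)
     so the expectation survives short-circuit lowering.  */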
7211 inner_arg0 = arg0;
7212 while (CONVERT_EXPR_P (inner_arg0)
7213 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7214 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7215 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7216
7217 /* If this is a builtin_expect within a builtin_expect, keep the
7218 inner one. See through a comparison against a constant. It
7219 might have been added to create a truthvalue. */
7220 inner = inner_arg0;
7221
7222 if (COMPARISON_CLASS_P (inner)
7223 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7224 inner = TREE_OPERAND (inner, 0);
7225
7226 if (TREE_CODE (inner) == CALL_EXPR
7227 && (fndecl = get_callee_fndecl (inner))
7228 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7229 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7230 return arg0;
7231
7232 inner = inner_arg0;
7233 code = TREE_CODE (inner);
7234 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7235 {
7236 tree op0 = TREE_OPERAND (inner, 0);
7237 tree op1 = TREE_OPERAND (inner, 1);
7238
7239 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7240 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7241 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7242
7243 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7244 }
7245
7246 /* If the argument isn't invariant then there's nothing else we can do. */
7247 if (!TREE_CONSTANT (inner_arg0))
7248 return NULL_TREE;
7249
7250 /* If we expect that a comparison against the argument will fold to
7251 a constant return the constant. In practice, this means a true
7252 constant or the address of a non-weak symbol. */
7253 inner = inner_arg0;
7254 STRIP_NOPS (inner);
7255 if (TREE_CODE (inner) == ADDR_EXPR)
7256 {
7257 do
7258 {
7259 inner = TREE_OPERAND (inner, 0);
7260 }
7261 while (TREE_CODE (inner) == COMPONENT_REF
7262 || TREE_CODE (inner) == ARRAY_REF);
7263 if ((TREE_CODE (inner) == VAR_DECL
7264 || TREE_CODE (inner) == FUNCTION_DECL)
7265 && DECL_WEAK (inner))
7266 return NULL_TREE;
7267 }
7268
7269 /* Otherwise, ARG0 already has the proper type for the return value. */
7270 return arg0;
7271 }
7272
7273 /* Fold a call to __builtin_classify_type with argument ARG. */
7274
7275 static tree
7276 fold_builtin_classify_type (tree arg)
7277 {
7278 if (arg == 0)
7279 return build_int_cst (integer_type_node, no_type_class);
7280
7281 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7282 }
7283
7284 /* Fold a call to __builtin_strlen with argument ARG. */
7285
7286 static tree
7287 fold_builtin_strlen (location_t loc, tree type, tree arg)
7288 {
7289 if (!validate_arg (arg, POINTER_TYPE))
7290 return NULL_TREE;
7291 else
7292 {
7293 tree len = c_strlen (arg, 0);
7294
7295 if (len)
7296 return fold_convert_loc (loc, type, len);
7297
7298 return NULL_TREE;
7299 }
7300 }
7301
7302 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7303
7304 static tree
7305 fold_builtin_inf (location_t loc, tree type, int warn)
7306 {
7307 REAL_VALUE_TYPE real;
7308
7309 /* __builtin_inff is intended to be usable to define INFINITY on all
7310 targets. If an infinity is not available, INFINITY expands "to a
7311 positive constant of type float that overflows at translation
7312 time", footnote "In this case, using INFINITY will violate the
7313 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7314 Thus we pedwarn to ensure this constraint violation is
7315 diagnosed. */
7316 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7317 pedwarn (loc, 0, "target format does not support infinity");
7318
7319 real_inf (&real);
7320 return build_real (type, real);
7321 }
7322
7323 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7324
7325 static tree
7326 fold_builtin_nan (tree arg, tree type, int quiet)
7327 {
7328 REAL_VALUE_TYPE real;
7329 const char *str;
7330
7331 if (!validate_arg (arg, POINTER_TYPE))
7332 return NULL_TREE;
7333 str = c_getstr (arg);
7334 if (!str)
7335 return NULL_TREE;
7336
7337 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7338 return NULL_TREE;
7339
7340 return build_real (type, real);
7341 }
7342
7343 /* Return true if the floating point expression T has an integer value.
7344 We also allow +Inf, -Inf and NaN to be considered integer values. */
7345
7346 static bool
7347 integer_valued_real_p (tree t)
7348 {
7349 switch (TREE_CODE (t))
7350 {
7351 case FLOAT_EXPR:
7352 return true;
7353
7354 case ABS_EXPR:
7355 case SAVE_EXPR:
7356 return integer_valued_real_p (TREE_OPERAND (t, 0));
7357
7358 case COMPOUND_EXPR:
7359 case MODIFY_EXPR:
7360 case BIND_EXPR:
7361 return integer_valued_real_p (TREE_OPERAND (t, 1));
7362
7363 case PLUS_EXPR:
7364 case MINUS_EXPR:
7365 case MULT_EXPR:
7366 case MIN_EXPR:
7367 case MAX_EXPR:
7368 return integer_valued_real_p (TREE_OPERAND (t, 0))
7369 && integer_valued_real_p (TREE_OPERAND (t, 1));
7370
7371 case COND_EXPR:
7372 return integer_valued_real_p (TREE_OPERAND (t, 1))
7373 && integer_valued_real_p (TREE_OPERAND (t, 2));
7374
7375 case REAL_CST:
7376 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7377
7378 CASE_CONVERT:
7379 {
7380 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7381 if (TREE_CODE (type) == INTEGER_TYPE)
7382 return true;
7383 if (TREE_CODE (type) == REAL_TYPE)
7384 return integer_valued_real_p (TREE_OPERAND (t, 0));
7385 break;
7386 }
7387
7388 case CALL_EXPR:
7389 switch (builtin_mathfn_code (t))
7390 {
7391 CASE_FLT_FN (BUILT_IN_CEIL):
7392 CASE_FLT_FN (BUILT_IN_FLOOR):
7393 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7394 CASE_FLT_FN (BUILT_IN_RINT):
7395 CASE_FLT_FN (BUILT_IN_ROUND):
7396 CASE_FLT_FN (BUILT_IN_TRUNC):
7397 return true;
7398
7399 CASE_FLT_FN (BUILT_IN_FMIN):
7400 CASE_FLT_FN (BUILT_IN_FMAX):
7401 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7402 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7403
7404 default:
7405 break;
7406 }
7407 break;
7408
7409 default:
7410 break;
7411 }
7412 return false;
7413 }
7414
7415 /* FNDECL is assumed to be a builtin where truncation can be propagated
7416 across (for instance floor((double)f) == (double)floorf (f)).
7417 Do the transformation for a call with argument ARG. */
7418
7419 static tree
7420 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7421 {
7422 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7423
7424 if (!validate_arg (arg, REAL_TYPE))
7425 return NULL_TREE;
7426
7427 /* Integer rounding functions are idempotent. */
7428 if (fcode == builtin_mathfn_code (arg))
7429 return arg;
7430
7431 /* If argument is already integer valued, and we don't need to worry
7432 about setting errno, there's no need to perform rounding. */
7433 if (! flag_errno_math && integer_valued_real_p (arg))
7434 return arg;
7435
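  /* A sketch of the narrowing below: for trunc ((double) f) with F of
     type float, call the narrower built-in and widen the result
     instead, i.e. (double) truncf (f), provided mathfn_built_in finds
     a suitable function for the narrower type.  */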
7436 if (optimize)
7437 {
7438 tree arg0 = strip_float_extensions (arg);
7439 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7440 tree newtype = TREE_TYPE (arg0);
7441 tree decl;
7442
7443 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7444 && (decl = mathfn_built_in (newtype, fcode)))
7445 return fold_convert_loc (loc, ftype,
7446 build_call_expr_loc (loc, decl, 1,
7447 fold_convert_loc (loc,
7448 newtype,
7449 arg0)));
7450 }
7451 return NULL_TREE;
7452 }
7453
7454 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7455 the argument, for instance lround((double)f) -> lroundf (f).
7456 Do the transformation for a call with argument ARG. */
7457
7458 static tree
7459 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7460 {
7461 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7462
7463 if (!validate_arg (arg, REAL_TYPE))
7464 return NULL_TREE;
7465
7466 /* If argument is already integer valued, and we don't need to worry
7467 about setting errno, there's no need to perform rounding. */
7468 if (! flag_errno_math && integer_valued_real_p (arg))
7469 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7470 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7471
7472 if (optimize)
7473 {
7474 tree ftype = TREE_TYPE (arg);
7475 tree arg0 = strip_float_extensions (arg);
7476 tree newtype = TREE_TYPE (arg0);
7477 tree decl;
7478
7479 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7480 && (decl = mathfn_built_in (newtype, fcode)))
7481 return build_call_expr_loc (loc, decl, 1,
7482 fold_convert_loc (loc, newtype, arg0));
7483 }
7484
7485 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7486 sizeof (int) == sizeof (long). */
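  /* E.g. iround (x) becomes (int) lround (x); the outer conversion is
     a no-op when int and long have the same precision.  */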
7487 if (TYPE_PRECISION (integer_type_node)
7488 == TYPE_PRECISION (long_integer_type_node))
7489 {
7490 tree newfn = NULL_TREE;
7491 switch (fcode)
7492 {
7493 CASE_FLT_FN (BUILT_IN_ICEIL):
7494 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7495 break;
7496
7497 CASE_FLT_FN (BUILT_IN_IFLOOR):
7498 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7499 break;
7500
7501 CASE_FLT_FN (BUILT_IN_IROUND):
7502 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7503 break;
7504
7505 CASE_FLT_FN (BUILT_IN_IRINT):
7506 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7507 break;
7508
7509 default:
7510 break;
7511 }
7512
7513 if (newfn)
7514 {
7515 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7516 return fold_convert_loc (loc,
7517 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7518 }
7519 }
7520
7521 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7522 sizeof (long long) == sizeof (long). */
7523 if (TYPE_PRECISION (long_long_integer_type_node)
7524 == TYPE_PRECISION (long_integer_type_node))
7525 {
7526 tree newfn = NULL_TREE;
7527 switch (fcode)
7528 {
7529 CASE_FLT_FN (BUILT_IN_LLCEIL):
7530 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7531 break;
7532
7533 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7534 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7535 break;
7536
7537 CASE_FLT_FN (BUILT_IN_LLROUND):
7538 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7539 break;
7540
7541 CASE_FLT_FN (BUILT_IN_LLRINT):
7542 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7543 break;
7544
7545 default:
7546 break;
7547 }
7548
7549 if (newfn)
7550 {
7551 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7552 return fold_convert_loc (loc,
7553 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7554 }
7555 }
7556
7557 return NULL_TREE;
7558 }
7559
7560 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7561 return type. Return NULL_TREE if no simplification can be made. */
7562
7563 static tree
7564 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7565 {
7566 tree res;
7567
7568 if (!validate_arg (arg, COMPLEX_TYPE)
7569 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7570 return NULL_TREE;
7571
7572 /* Calculate the result when the argument is a constant. */
7573 if (TREE_CODE (arg) == COMPLEX_CST
7574 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7575 type, mpfr_hypot)))
7576 return res;
7577
7578 if (TREE_CODE (arg) == COMPLEX_EXPR)
7579 {
7580 tree real = TREE_OPERAND (arg, 0);
7581 tree imag = TREE_OPERAND (arg, 1);
7582
7583 /* If either part is zero, cabs is fabs of the other. */
7584 if (real_zerop (real))
7585 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7586 if (real_zerop (imag))
7587 return fold_build1_loc (loc, ABS_EXPR, type, real);
7588
7589 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7590 if (flag_unsafe_math_optimizations
7591 && operand_equal_p (real, imag, OEP_PURE_SAME))
7592 {
7593 STRIP_NOPS (real);
7594 return fold_build2_loc (loc, MULT_EXPR, type,
7595 fold_build1_loc (loc, ABS_EXPR, type, real),
7596 build_real_truncate (type, dconst_sqrt2 ()));
7597 }
7598 }
7599
7600 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7601 if (TREE_CODE (arg) == NEGATE_EXPR
7602 || TREE_CODE (arg) == CONJ_EXPR)
7603 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7604
7605 /* Don't do this when optimizing for size. */
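  /* The expansion built below is, informally,
       cabs (z) -> sqrt (creal (z)*creal (z) + cimag (z)*cimag (z))
     trading the library call for a sqrt plus two multiplies.  */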
7606 if (flag_unsafe_math_optimizations
7607 && optimize && optimize_function_for_speed_p (cfun))
7608 {
7609 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7610
7611 if (sqrtfn != NULL_TREE)
7612 {
7613 tree rpart, ipart, result;
7614
7615 arg = builtin_save_expr (arg);
7616
7617 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7618 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7619
7620 rpart = builtin_save_expr (rpart);
7621 ipart = builtin_save_expr (ipart);
7622
7623 result = fold_build2_loc (loc, PLUS_EXPR, type,
7624 fold_build2_loc (loc, MULT_EXPR, type,
7625 rpart, rpart),
7626 fold_build2_loc (loc, MULT_EXPR, type,
7627 ipart, ipart));
7628
7629 return build_call_expr_loc (loc, sqrtfn, 1, result);
7630 }
7631 }
7632
7633 return NULL_TREE;
7634 }
7635
7636 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7637 complex tree type of the result. If NEG is true, the imaginary
7638 zero is negative. */
7639
7640 static tree
7641 build_complex_cproj (tree type, bool neg)
7642 {
7643 REAL_VALUE_TYPE rinf, rzero = dconst0;
7644
7645 real_inf (&rinf);
7646 rzero.sign = neg;
7647 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7648 build_real (TREE_TYPE (type), rzero));
7649 }
7650
7651 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7652 return type. Return NULL_TREE if no simplification can be made. */
7653
7654 static tree
7655 fold_builtin_cproj (location_t loc, tree arg, tree type)
7656 {
7657 if (!validate_arg (arg, COMPLEX_TYPE)
7658 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7659 return NULL_TREE;
7660
7661 /* If there are no infinities, return arg. */
7662 if (! HONOR_INFINITIES (type))
7663 return non_lvalue_loc (loc, arg);
7664
7665 /* Calculate the result when the argument is a constant. */
7666 if (TREE_CODE (arg) == COMPLEX_CST)
7667 {
7668 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7669 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7670
7671 if (real_isinf (real) || real_isinf (imag))
7672 return build_complex_cproj (type, imag->sign);
7673 else
7674 return arg;
7675 }
7676 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7677 {
7678 tree real = TREE_OPERAND (arg, 0);
7679 tree imag = TREE_OPERAND (arg, 1);
7680
7681 STRIP_NOPS (real);
7682 STRIP_NOPS (imag);
7683
7684 /* If the real part is inf and the imag part is known to be
7685 nonnegative, return (inf + 0i). Remember side-effects are
7686 possible in the imag part. */
7687 if (TREE_CODE (real) == REAL_CST
7688 && real_isinf (TREE_REAL_CST_PTR (real))
7689 && tree_expr_nonnegative_p (imag))
7690 return omit_one_operand_loc (loc, type,
7691 build_complex_cproj (type, false),
7692 arg);
7693
7694 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7695 Remember side-effects are possible in the real part. */
7696 if (TREE_CODE (imag) == REAL_CST
7697 && real_isinf (TREE_REAL_CST_PTR (imag)))
7698 return
7699 omit_one_operand_loc (loc, type,
7700 build_complex_cproj (type, TREE_REAL_CST_PTR
7701 (imag)->sign), arg);
7702 }
7703
7704 return NULL_TREE;
7705 }
7706
7707 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7708 TYPE is the type of the return value. Return NULL_TREE if no
7709 simplification can be made. */
7710
7711 static tree
7712 fold_builtin_cos (location_t loc,
7713 tree arg, tree type, tree fndecl)
7714 {
7715 tree res, narg;
7716
7717 if (!validate_arg (arg, REAL_TYPE))
7718 return NULL_TREE;
7719
7720 /* Calculate the result when the argument is a constant. */
7721 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7722 return res;
7723
7724 /* Optimize cos(-x) into cos (x). */
7725 if ((narg = fold_strip_sign_ops (arg)))
7726 return build_call_expr_loc (loc, fndecl, 1, narg);
7727
7728 return NULL_TREE;
7729 }
7730
7731 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7732 Return NULL_TREE if no simplification can be made. */
7733
7734 static tree
7735 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7736 {
7737 if (validate_arg (arg, REAL_TYPE))
7738 {
7739 tree res, narg;
7740
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7743 return res;
7744
7745 /* Optimize cosh(-x) into cosh (x). */
7746 if ((narg = fold_strip_sign_ops (arg)))
7747 return build_call_expr_loc (loc, fndecl, 1, narg);
7748 }
7749
7750 return NULL_TREE;
7751 }
7752
7753 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7754 argument ARG. TYPE is the type of the return value. Return
7755 NULL_TREE if no simplification can be made. */
7756
7757 static tree
7758 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7759 bool hyper)
7760 {
7761 if (validate_arg (arg, COMPLEX_TYPE)
7762 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7763 {
7764 tree tmp;
7765
7766 /* Calculate the result when the argument is a constant. */
7767 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7768 return tmp;
7769
7770 /* Optimize fn(-x) into fn(x). */
7771 if ((tmp = fold_strip_sign_ops (arg)))
7772 return build_call_expr_loc (loc, fndecl, 1, tmp);
7773 }
7774
7775 return NULL_TREE;
7776 }
7777
7778 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7779 Return NULL_TREE if no simplification can be made. */
7780
7781 static tree
7782 fold_builtin_tan (tree arg, tree type)
7783 {
7784 enum built_in_function fcode;
7785 tree res;
7786
7787 if (!validate_arg (arg, REAL_TYPE))
7788 return NULL_TREE;
7789
7790 /* Calculate the result when the argument is a constant. */
7791 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7792 return res;
7793
7794 /* Optimize tan(atan(x)) = x. */
7795 fcode = builtin_mathfn_code (arg);
7796 if (flag_unsafe_math_optimizations
7797 && (fcode == BUILT_IN_ATAN
7798 || fcode == BUILT_IN_ATANF
7799 || fcode == BUILT_IN_ATANL))
7800 return CALL_EXPR_ARG (arg, 0);
7801
7802 return NULL_TREE;
7803 }
7804
7805 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7806 NULL_TREE if no simplification can be made. */
7807
7808 static tree
7809 fold_builtin_sincos (location_t loc,
7810 tree arg0, tree arg1, tree arg2)
7811 {
7812 tree type;
7813 tree res, fn, call;
7814
7815 if (!validate_arg (arg0, REAL_TYPE)
7816 || !validate_arg (arg1, POINTER_TYPE)
7817 || !validate_arg (arg2, POINTER_TYPE))
7818 return NULL_TREE;
7819
7820 type = TREE_TYPE (arg0);
7821
7822 /* Calculate the result when the argument is a constant. */
7823 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7824 return res;
7825
7826 /* Canonicalize sincos to cexpi. */
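  /* Informally, the lowering built below is
       sincos (x, sinp, cosp)
     -> t = cexpi (x); *sinp = __imag__ t; *cosp = __real__ t;
     with the single cexpi call wrapped in a save_expr.  */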
7827 if (!targetm.libc_has_function (function_c99_math_complex))
7828 return NULL_TREE;
7829 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7830 if (!fn)
7831 return NULL_TREE;
7832
7833 call = build_call_expr_loc (loc, fn, 1, arg0);
7834 call = builtin_save_expr (call);
7835
7836 return build2 (COMPOUND_EXPR, void_type_node,
7837 build2 (MODIFY_EXPR, void_type_node,
7838 build_fold_indirect_ref_loc (loc, arg1),
7839 build1 (IMAGPART_EXPR, type, call)),
7840 build2 (MODIFY_EXPR, void_type_node,
7841 build_fold_indirect_ref_loc (loc, arg2),
7842 build1 (REALPART_EXPR, type, call)));
7843 }
7844
7845 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7846 NULL_TREE if no simplification can be made. */
7847
7848 static tree
7849 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7850 {
7851 tree rtype;
7852 tree realp, imagp, ifn;
7853 tree res;
7854
7855 if (!validate_arg (arg0, COMPLEX_TYPE)
7856 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7857 return NULL_TREE;
7858
7859 /* Calculate the result when the argument is a constant. */
7860 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7861 return res;
7862
7863 rtype = TREE_TYPE (TREE_TYPE (arg0));
7864
7865 /* If we can figure out the real part of arg0 and it is constant zero,
7866 fold to cexpi. */
7867 if (!targetm.libc_has_function (function_c99_math_complex))
7868 return NULL_TREE;
7869 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7870 if (!ifn)
7871 return NULL_TREE;
7872
7873 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7874 && real_zerop (realp))
7875 {
7876 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7877 return build_call_expr_loc (loc, ifn, 1, narg);
7878 }
7879
7880 /* If we can easily decompose the real and imaginary parts, split cexp
7881 into exp (r) * cexpi (i). */
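  /* A sketch: for z = r + i*I this builds
       cexp (z) -> exp (r) * cexpi (i)
     i.e. the complex value (exp (r) * cos (i), exp (r) * sin (i)),
     with both calls saved so each is evaluated only once.  */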
7882 if (flag_unsafe_math_optimizations
7883 && realp)
7884 {
7885 tree rfn, rcall, icall;
7886
7887 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7888 if (!rfn)
7889 return NULL_TREE;
7890
7891 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7892 if (!imagp)
7893 return NULL_TREE;
7894
7895 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7896 icall = builtin_save_expr (icall);
7897 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7898 rcall = builtin_save_expr (rcall);
7899 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7900 fold_build2_loc (loc, MULT_EXPR, rtype,
7901 rcall,
7902 fold_build1_loc (loc, REALPART_EXPR,
7903 rtype, icall)),
7904 fold_build2_loc (loc, MULT_EXPR, rtype,
7905 rcall,
7906 fold_build1_loc (loc, IMAGPART_EXPR,
7907 rtype, icall)));
7908 }
7909
7910 return NULL_TREE;
7911 }
7912
7913 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7914 Return NULL_TREE if no simplification can be made. */
7915
7916 static tree
7917 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7918 {
7919 if (!validate_arg (arg, REAL_TYPE))
7920 return NULL_TREE;
7921
7922 /* Optimize trunc of constant value. */
7923 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7924 {
7925 REAL_VALUE_TYPE r, x;
7926 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7927
7928 x = TREE_REAL_CST (arg);
7929 real_trunc (&r, TYPE_MODE (type), &x);
7930 return build_real (type, r);
7931 }
7932
7933 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7934 }
7935
7936 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7937 Return NULL_TREE if no simplification can be made. */
7938
7939 static tree
7940 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7941 {
7942 if (!validate_arg (arg, REAL_TYPE))
7943 return NULL_TREE;
7944
7945 /* Optimize floor of constant value. */
7946 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7947 {
7948 REAL_VALUE_TYPE x;
7949
7950 x = TREE_REAL_CST (arg);
7951 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7952 {
7953 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7954 REAL_VALUE_TYPE r;
7955
7956 real_floor (&r, TYPE_MODE (type), &x);
7957 return build_real (type, r);
7958 }
7959 }
7960
7961 /* Fold floor (x) where x is nonnegative to trunc (x). */
7962 if (tree_expr_nonnegative_p (arg))
7963 {
7964 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7965 if (truncfn)
7966 return build_call_expr_loc (loc, truncfn, 1, arg);
7967 }
7968
7969 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7970 }
7971
7972 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7973 Return NULL_TREE if no simplification can be made. */
7974
7975 static tree
7976 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7977 {
7978 if (!validate_arg (arg, REAL_TYPE))
7979 return NULL_TREE;
7980
7981 /* Optimize ceil of constant value. */
7982 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7983 {
7984 REAL_VALUE_TYPE x;
7985
7986 x = TREE_REAL_CST (arg);
7987 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7988 {
7989 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7990 REAL_VALUE_TYPE r;
7991
7992 real_ceil (&r, TYPE_MODE (type), &x);
7993 return build_real (type, r);
7994 }
7995 }
7996
7997 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7998 }
7999
8000 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8001 Return NULL_TREE if no simplification can be made. */
8002
8003 static tree
8004 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8005 {
8006 if (!validate_arg (arg, REAL_TYPE))
8007 return NULL_TREE;
8008
8009 /* Optimize round of constant value. */
8010 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8011 {
8012 REAL_VALUE_TYPE x;
8013
8014 x = TREE_REAL_CST (arg);
8015 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8016 {
8017 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8018 REAL_VALUE_TYPE r;
8019
8020 real_round (&r, TYPE_MODE (type), &x);
8021 return build_real (type, r);
8022 }
8023 }
8024
8025 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8026 }
8027
8028 /* Fold function call to builtin lround, lroundf or lroundl (or the
8029 corresponding long long versions) and other rounding functions. ARG
8030 is the argument to the call. Return NULL_TREE if no simplification
8031 can be made. */
8032
8033 static tree
8034 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8035 {
8036 if (!validate_arg (arg, REAL_TYPE))
8037 return NULL_TREE;
8038
8039 /* Optimize lround of constant value. */
8040 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8041 {
8042 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8043
8044 if (real_isfinite (&x))
8045 {
8046 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8047 tree ftype = TREE_TYPE (arg);
8048 REAL_VALUE_TYPE r;
8049 bool fail = false;
8050
8051 switch (DECL_FUNCTION_CODE (fndecl))
8052 {
8053 CASE_FLT_FN (BUILT_IN_IFLOOR):
8054 CASE_FLT_FN (BUILT_IN_LFLOOR):
8055 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8056 real_floor (&r, TYPE_MODE (ftype), &x);
8057 break;
8058
8059 CASE_FLT_FN (BUILT_IN_ICEIL):
8060 CASE_FLT_FN (BUILT_IN_LCEIL):
8061 CASE_FLT_FN (BUILT_IN_LLCEIL):
8062 real_ceil (&r, TYPE_MODE (ftype), &x);
8063 break;
8064
8065 CASE_FLT_FN (BUILT_IN_IROUND):
8066 CASE_FLT_FN (BUILT_IN_LROUND):
8067 CASE_FLT_FN (BUILT_IN_LLROUND):
8068 real_round (&r, TYPE_MODE (ftype), &x);
8069 break;
8070
8071 default:
8072 gcc_unreachable ();
8073 }
8074
8075 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8076 if (!fail)
8077 return wide_int_to_tree (itype, val);
8078 }
8079 }
8080
8081 switch (DECL_FUNCTION_CODE (fndecl))
8082 {
8083 CASE_FLT_FN (BUILT_IN_LFLOOR):
8084 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8085 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8086 if (tree_expr_nonnegative_p (arg))
8087 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8088 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8089 break;
8090 default:;
8091 }
8092
8093 return fold_fixed_mathfn (loc, fndecl, arg);
8094 }
8095
8096 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8097 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8098 the argument to the call. Return NULL_TREE if no simplification can
8099 be made. */
8100
8101 static tree
8102 fold_builtin_bitop (tree fndecl, tree arg)
8103 {
8104 if (!validate_arg (arg, INTEGER_TYPE))
8105 return NULL_TREE;
8106
8107 /* Optimize for constant argument. */
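  /* Informal examples: __builtin_popcount (0xf0) folds to 4, and
     __builtin_ffs (8) folds to 4, the one-based index of the least
     significant set bit.  */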
8108 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8109 {
8110 tree type = TREE_TYPE (arg);
8111 int result;
8112
8113 switch (DECL_FUNCTION_CODE (fndecl))
8114 {
8115 CASE_INT_FN (BUILT_IN_FFS):
8116 result = wi::ffs (arg);
8117 break;
8118
8119 CASE_INT_FN (BUILT_IN_CLZ):
8120 if (wi::ne_p (arg, 0))
8121 result = wi::clz (arg);
8122 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8123 result = TYPE_PRECISION (type);
8124 break;
8125
8126 CASE_INT_FN (BUILT_IN_CTZ):
8127 if (wi::ne_p (arg, 0))
8128 result = wi::ctz (arg);
8129 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8130 result = TYPE_PRECISION (type);
8131 break;
8132
8133 CASE_INT_FN (BUILT_IN_CLRSB):
8134 result = wi::clrsb (arg);
8135 break;
8136
8137 CASE_INT_FN (BUILT_IN_POPCOUNT):
8138 result = wi::popcount (arg);
8139 break;
8140
8141 CASE_INT_FN (BUILT_IN_PARITY):
8142 result = wi::parity (arg);
8143 break;
8144
8145 default:
8146 gcc_unreachable ();
8147 }
8148
8149 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8150 }
8151
8152 return NULL_TREE;
8153 }
8154
8155 /* Fold function call to builtin_bswap and the short, long and long long
8156 variants. Return NULL_TREE if no simplification can be made. */
8157 static tree
8158 fold_builtin_bswap (tree fndecl, tree arg)
8159 {
8160 if (! validate_arg (arg, INTEGER_TYPE))
8161 return NULL_TREE;
8162
8163 /* Optimize constant value. */
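  /* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412 and
     __builtin_bswap16 (0x1234) folds to 0x3412.  */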
8164 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8165 {
8166 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8167
8168 switch (DECL_FUNCTION_CODE (fndecl))
8169 {
8170 case BUILT_IN_BSWAP16:
8171 case BUILT_IN_BSWAP32:
8172 case BUILT_IN_BSWAP64:
8173 {
8174 signop sgn = TYPE_SIGN (type);
8175 tree result =
8176 wide_int_to_tree (type,
8177 wide_int::from (arg, TYPE_PRECISION (type),
8178 sgn).bswap ());
8179 return result;
8180 }
8181 default:
8182 gcc_unreachable ();
8183 }
8184 }
8185
8186 return NULL_TREE;
8187 }
8188
8189 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8190 NULL_TREE if no simplification can be made. */
8191
8192 static tree
8193 fold_builtin_hypot (location_t loc, tree fndecl,
8194 tree arg0, tree arg1, tree type)
8195 {
8196 tree res, narg0, narg1;
8197
8198 if (!validate_arg (arg0, REAL_TYPE)
8199 || !validate_arg (arg1, REAL_TYPE))
8200 return NULL_TREE;
8201
8202 /* Calculate the result when the argument is a constant. */
8203 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8204 return res;
8205
8206 /* If either argument to hypot has a negate or abs, strip that off.
8207 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8208 narg0 = fold_strip_sign_ops (arg0);
8209 narg1 = fold_strip_sign_ops (arg1);
8210 if (narg0 || narg1)
8211 {
8212 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8213 narg1 ? narg1 : arg1);
8214 }
8215
8216 /* If either argument is zero, hypot is fabs of the other. */
8217 if (real_zerop (arg0))
8218 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8219 else if (real_zerop (arg1))
8220 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8221
8222 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8223 if (flag_unsafe_math_optimizations
8224 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8225 return fold_build2_loc (loc, MULT_EXPR, type,
8226 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8227 build_real_truncate (type, dconst_sqrt2 ()));
8228
8229 return NULL_TREE;
8230 }
8231
8232
8233 /* Fold a builtin function call to pow, powf, or powl. Return
8234 NULL_TREE if no simplification can be made. */
8235 static tree
8236 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8237 {
8238 tree res;
8239
8240 if (!validate_arg (arg0, REAL_TYPE)
8241 || !validate_arg (arg1, REAL_TYPE))
8242 return NULL_TREE;
8243
8244 /* Calculate the result when the argument is a constant. */
8245 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8246 return res;
8247
8248 /* Optimize pow(1.0,y) = 1.0. */
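  /* omit_one_operand_loc keeps ARG1 in the result so that any side
     effects in the exponent are still evaluated.  */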
8249 if (real_onep (arg0))
8250 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8251
8252 if (TREE_CODE (arg1) == REAL_CST
8253 && !TREE_OVERFLOW (arg1))
8254 {
8255 REAL_VALUE_TYPE cint;
8256 REAL_VALUE_TYPE c;
8257 HOST_WIDE_INT n;
8258
8259 c = TREE_REAL_CST (arg1);
8260
8261 /* Optimize pow(x,0.0) = 1.0. */
8262 if (real_equal (&c, &dconst0))
8263 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8264 arg0);
8265
8266 /* Optimize pow(x,1.0) = x. */
8267 if (real_equal (&c, &dconst1))
8268 return arg0;
8269
8270 /* Optimize pow(x,-1.0) = 1.0/x. */
8271 if (real_equal (&c, &dconstm1))
8272 return fold_build2_loc (loc, RDIV_EXPR, type,
8273 build_real (type, dconst1), arg0);
8274
8275 /* Optimize pow(x,0.5) = sqrt(x). */
8276 if (flag_unsafe_math_optimizations
8277 && real_equal (&c, &dconsthalf))
8278 {
8279 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8280
8281 if (sqrtfn != NULL_TREE)
8282 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8283 }
8284
8285 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8286 if (flag_unsafe_math_optimizations)
8287 {
8288 const REAL_VALUE_TYPE dconstroot
8289 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8290
8291 if (real_equal (&c, &dconstroot))
8292 {
8293 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8294 if (cbrtfn != NULL_TREE)
8295 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8296 }
8297 }
8298
8299 /* Check for an integer exponent. */
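      /* E.g. for c == 3.0 the round trip through real_to_integer and
	 real_from_integer reproduces C exactly, so pow (2.0, 3.0) can
	 be folded to 8.0 below via real_powi.  */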
8300 n = real_to_integer (&c);
8301 real_from_integer (&cint, VOIDmode, n, SIGNED);
8302 if (real_identical (&c, &cint))
8303 {
8304 /* Attempt to evaluate pow at compile-time, unless this should
8305 raise an exception. */
8306 if (TREE_CODE (arg0) == REAL_CST
8307 && !TREE_OVERFLOW (arg0)
8308 && (n > 0
8309 || (!flag_trapping_math && !flag_errno_math)
8310 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8311 {
8312 REAL_VALUE_TYPE x;
8313 bool inexact;
8314
8315 x = TREE_REAL_CST (arg0);
8316 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8317 if (flag_unsafe_math_optimizations || !inexact)
8318 return build_real (type, x);
8319 }
8320
8321 /* Strip sign ops from even integer powers. */
8322 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8323 {
8324 tree narg0 = fold_strip_sign_ops (arg0);
8325 if (narg0)
8326 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8327 }
8328 }
8329 }
8330
8331 if (flag_unsafe_math_optimizations)
8332 {
8333 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8334
8335 /* Optimize pow(expN(x),y) = expN(x*y). */
8336 if (BUILTIN_EXPONENT_P (fcode))
8337 {
8338 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8339 tree arg = CALL_EXPR_ARG (arg0, 0);
8340 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8341 return build_call_expr_loc (loc, expfn, 1, arg);
8342 }
8343
8344 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8345 if (BUILTIN_SQRT_P (fcode))
8346 {
8347 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8348 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8349 build_real (type, dconsthalf));
8350 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8351 }
8352
8353 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8354 if (BUILTIN_CBRT_P (fcode))
8355 {
8356 tree arg = CALL_EXPR_ARG (arg0, 0);
8357 if (tree_expr_nonnegative_p (arg))
8358 {
8359 tree c = build_real_truncate (type, dconst_third ());
8360 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
8361 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8362 }
8363 }
8364
8365 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8366 if (fcode == BUILT_IN_POW
8367 || fcode == BUILT_IN_POWF
8368 || fcode == BUILT_IN_POWL)
8369 {
8370 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8371 if (tree_expr_nonnegative_p (arg00))
8372 {
8373 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8374 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8375 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8376 }
8377 }
8378 }
8379
8380 return NULL_TREE;
8381 }
8382
8383 /* Fold a builtin function call to powi, powif, or powil with arguments
8384 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8385 static tree
8386 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8387 tree arg0, tree arg1, tree type)
8388 {
8389 if (!validate_arg (arg0, REAL_TYPE)
8390 || !validate_arg (arg1, INTEGER_TYPE))
8391 return NULL_TREE;
8392
8393 /* Optimize pow(1.0,y) = 1.0. */
8394 if (real_onep (arg0))
8395 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8396
8397 if (tree_fits_shwi_p (arg1))
8398 {
8399 HOST_WIDE_INT c = tree_to_shwi (arg1);
8400
8401 /* Evaluate powi at compile-time. */
8402 if (TREE_CODE (arg0) == REAL_CST
8403 && !TREE_OVERFLOW (arg0))
8404 {
8405 REAL_VALUE_TYPE x;
8406 x = TREE_REAL_CST (arg0);
8407 real_powi (&x, TYPE_MODE (type), &x, c);
8408 return build_real (type, x);
8409 }
8410
8411 /* Optimize pow(x,0) = 1.0. */
8412 if (c == 0)
8413 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8414 arg0);
8415
8416 /* Optimize pow(x,1) = x. */
8417 if (c == 1)
8418 return arg0;
8419
8420 /* Optimize pow(x,-1) = 1.0/x. */
8421 if (c == -1)
8422 return fold_build2_loc (loc, RDIV_EXPR, type,
8423 build_real (type, dconst1), arg0);
8424 }
8425
8426 return NULL_TREE;
8427 }
8428
8429 /* A subroutine of fold_builtin to fold the various exponent
8430 functions. Return NULL_TREE if no simplification can be made.
8431 FUNC is the corresponding MPFR exponent function. */
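/* As used in the body, FUNC is expected to be mpfr_exp for exp,
   mpfr_exp2 for exp2 and mpfr_exp10 for exp10, since the expN/logN
   pairing below keys off those function pointers.  */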
8432
8433 static tree
8434 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8435 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8436 {
8437 if (validate_arg (arg, REAL_TYPE))
8438 {
8439 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8440 tree res;
8441
8442 /* Calculate the result when the argument is a constant. */
8443 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8444 return res;
8445
8446 /* Optimize expN(logN(x)) = x. */
8447 if (flag_unsafe_math_optimizations)
8448 {
8449 const enum built_in_function fcode = builtin_mathfn_code (arg);
8450
8451 if ((func == mpfr_exp
8452 && (fcode == BUILT_IN_LOG
8453 || fcode == BUILT_IN_LOGF
8454 || fcode == BUILT_IN_LOGL))
8455 || (func == mpfr_exp2
8456 && (fcode == BUILT_IN_LOG2
8457 || fcode == BUILT_IN_LOG2F
8458 || fcode == BUILT_IN_LOG2L))
8459 || (func == mpfr_exp10
8460 && (fcode == BUILT_IN_LOG10
8461 || fcode == BUILT_IN_LOG10F
8462 || fcode == BUILT_IN_LOG10L)))
8463 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8464 }
8465 }
8466
8467 return NULL_TREE;
8468 }
8469
8470 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8471 arguments to the call, and TYPE is its return type.
8472 Return NULL_TREE if no simplification can be made. */
8473
8474 static tree
8475 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8476 {
8477 if (!validate_arg (arg1, POINTER_TYPE)
8478 || !validate_arg (arg2, INTEGER_TYPE)
8479 || !validate_arg (len, INTEGER_TYPE))
8480 return NULL_TREE;
8481 else
8482 {
8483 const char *p1;
8484
8485 if (TREE_CODE (arg2) != INTEGER_CST
8486 || !tree_fits_uhwi_p (len))
8487 return NULL_TREE;
8488
8489 p1 = c_getstr (arg1);
8490 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8491 {
8492 char c;
8493 const char *r;
8494 tree tem;
8495
8496 if (target_char_cast (arg2, &c))
8497 return NULL_TREE;
8498
8499 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8500
8501 if (r == NULL)
8502 return build_int_cst (TREE_TYPE (arg1), 0);
8503
8504 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8505 return fold_convert_loc (loc, type, tem);
8506 }
8507 return NULL_TREE;
8508 }
8509 }
8510
8511 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8512 Return NULL_TREE if no simplification can be made. */
8513
8514 static tree
8515 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8516 {
8517 const char *p1, *p2;
8518
8519 if (!validate_arg (arg1, POINTER_TYPE)
8520 || !validate_arg (arg2, POINTER_TYPE)
8521 || !validate_arg (len, INTEGER_TYPE))
8522 return NULL_TREE;
8523
8524 /* If the LEN parameter is zero, return zero. */
8525 if (integer_zerop (len))
8526 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8527 arg1, arg2);
8528
8529 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8530 if (operand_equal_p (arg1, arg2, 0))
8531 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8532
8533 p1 = c_getstr (arg1);
8534 p2 = c_getstr (arg2);
8535
8536 /* If all arguments are constant, and the value of len is not greater
8537 than the lengths of arg1 and arg2, evaluate at compile-time. */
8538 if (tree_fits_uhwi_p (len) && p1 && p2
8539 && compare_tree_int (len, strlen (p1) + 1) <= 0
8540 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8541 {
8542 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8543
8544 if (r > 0)
8545 return integer_one_node;
8546 else if (r < 0)
8547 return integer_minus_one_node;
8548 else
8549 return integer_zero_node;
8550 }
8551
8552 /* If the len parameter is one, return an expression corresponding to
8553 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8554 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8555 {
8556 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8557 tree cst_uchar_ptr_node
8558 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8559
8560 tree ind1
8561 = fold_convert_loc (loc, integer_type_node,
8562 build1 (INDIRECT_REF, cst_uchar_node,
8563 fold_convert_loc (loc,
8564 cst_uchar_ptr_node,
8565 arg1)));
8566 tree ind2
8567 = fold_convert_loc (loc, integer_type_node,
8568 build1 (INDIRECT_REF, cst_uchar_node,
8569 fold_convert_loc (loc,
8570 cst_uchar_ptr_node,
8571 arg2)));
8572 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8573 }
8574
8575 return NULL_TREE;
8576 }
8577
8578 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8579 Return NULL_TREE if no simplification can be made. */
8580
8581 static tree
8582 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8583 {
8584 const char *p1, *p2;
8585
8586 if (!validate_arg (arg1, POINTER_TYPE)
8587 || !validate_arg (arg2, POINTER_TYPE))
8588 return NULL_TREE;
8589
8590 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8591 if (operand_equal_p (arg1, arg2, 0))
8592 return integer_zero_node;
8593
8594 p1 = c_getstr (arg1);
8595 p2 = c_getstr (arg2);
8596
8597 if (p1 && p2)
8598 {
8599 const int i = strcmp (p1, p2);
8600 if (i < 0)
8601 return integer_minus_one_node;
8602 else if (i > 0)
8603 return integer_one_node;
8604 else
8605 return integer_zero_node;
8606 }
8607
8608 /* If the second arg is "", return *(const unsigned char*)arg1. */
8609 if (p2 && *p2 == '\0')
8610 {
8611 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8612 tree cst_uchar_ptr_node
8613 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8614
8615 return fold_convert_loc (loc, integer_type_node,
8616 build1 (INDIRECT_REF, cst_uchar_node,
8617 fold_convert_loc (loc,
8618 cst_uchar_ptr_node,
8619 arg1)));
8620 }
8621
8622 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8623 if (p1 && *p1 == '\0')
8624 {
8625 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8626 tree cst_uchar_ptr_node
8627 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8628
8629 tree temp
8630 = fold_convert_loc (loc, integer_type_node,
8631 build1 (INDIRECT_REF, cst_uchar_node,
8632 fold_convert_loc (loc,
8633 cst_uchar_ptr_node,
8634 arg2)));
8635 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8636 }
8637
8638 return NULL_TREE;
8639 }
8640
8641 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8642 Return NULL_TREE if no simplification can be made. */
8643
8644 static tree
8645 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8646 {
8647 const char *p1, *p2;
8648
8649 if (!validate_arg (arg1, POINTER_TYPE)
8650 || !validate_arg (arg2, POINTER_TYPE)
8651 || !validate_arg (len, INTEGER_TYPE))
8652 return NULL_TREE;
8653
8654 /* If the LEN parameter is zero, return zero. */
8655 if (integer_zerop (len))
8656 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8657 arg1, arg2);
8658
8659 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8660 if (operand_equal_p (arg1, arg2, 0))
8661 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8662
8663 p1 = c_getstr (arg1);
8664 p2 = c_getstr (arg2);
8665
8666 if (tree_fits_uhwi_p (len) && p1 && p2)
8667 {
8668 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8669 if (i > 0)
8670 return integer_one_node;
8671 else if (i < 0)
8672 return integer_minus_one_node;
8673 else
8674 return integer_zero_node;
8675 }
8676
8677 /* If the second arg is "", and the length is greater than zero,
8678 return *(const unsigned char*)arg1. */
8679 if (p2 && *p2 == '\0'
8680 && TREE_CODE (len) == INTEGER_CST
8681 && tree_int_cst_sgn (len) == 1)
8682 {
8683 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8684 tree cst_uchar_ptr_node
8685 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8686
8687 return fold_convert_loc (loc, integer_type_node,
8688 build1 (INDIRECT_REF, cst_uchar_node,
8689 fold_convert_loc (loc,
8690 cst_uchar_ptr_node,
8691 arg1)));
8692 }
8693
8694 /* If the first arg is "", and the length is greater than zero,
8695 return -*(const unsigned char*)arg2. */
8696 if (p1 && *p1 == '\0'
8697 && TREE_CODE (len) == INTEGER_CST
8698 && tree_int_cst_sgn (len) == 1)
8699 {
8700 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8701 tree cst_uchar_ptr_node
8702 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8703
8704 tree temp = fold_convert_loc (loc, integer_type_node,
8705 build1 (INDIRECT_REF, cst_uchar_node,
8706 fold_convert_loc (loc,
8707 cst_uchar_ptr_node,
8708 arg2)));
8709 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8710 }
8711
8712 /* If the len parameter is one, return an expression corresponding to
8713 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8714 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8715 {
8716 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8717 tree cst_uchar_ptr_node
8718 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8719
8720 tree ind1 = fold_convert_loc (loc, integer_type_node,
8721 build1 (INDIRECT_REF, cst_uchar_node,
8722 fold_convert_loc (loc,
8723 cst_uchar_ptr_node,
8724 arg1)));
8725 tree ind2 = fold_convert_loc (loc, integer_type_node,
8726 build1 (INDIRECT_REF, cst_uchar_node,
8727 fold_convert_loc (loc,
8728 cst_uchar_ptr_node,
8729 arg2)));
8730 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8731 }
8732
8733 return NULL_TREE;
8734 }
8735
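/* Editorial example (illustrative sketch): with all-constant arguments
   the host strncmp computes the result at compile time, but only when
   LEN cannot read past either constant string:

     int r0 = strncmp ("hello", "help", 3);   folds to 0
     int r1 = strncmp ("hello", "help", 4);   folds to -1

   The first three characters match; at index 3, 'l' < 'p', and the
   fold normalizes any negative host result to -1.  */
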
8736 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8737 ARG. Return NULL_TREE if no simplification can be made. */
8738
8739 static tree
8740 fold_builtin_signbit (location_t loc, tree arg, tree type)
8741 {
8742 if (!validate_arg (arg, REAL_TYPE))
8743 return NULL_TREE;
8744
8745 /* If ARG is a compile-time constant, determine the result. */
8746 if (TREE_CODE (arg) == REAL_CST
8747 && !TREE_OVERFLOW (arg))
8748 {
8749 REAL_VALUE_TYPE c;
8750
8751 c = TREE_REAL_CST (arg);
8752 return (REAL_VALUE_NEGATIVE (c)
8753 ? build_one_cst (type)
8754 : build_zero_cst (type));
8755 }
8756
8757 /* If ARG is non-negative, the result is always zero. */
8758 if (tree_expr_nonnegative_p (arg))
8759 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8760
8761 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8762 if (!HONOR_SIGNED_ZEROS (arg))
8763 return fold_convert (type,
8764 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8765 build_real (TREE_TYPE (arg), dconst0)));
8766
8767 return NULL_TREE;
8768 }
8769
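/* Editorial example (illustrative sketch): the REAL_CST case folds a
   literal outright, and the last case rewrites the call as an ordinary
   comparison when signed zeros need not be honored:

     int a = __builtin_signbit (-2.5);   folds to 1
     int b = __builtin_signbit (x);      may become (x < 0.0)

   The comparison form would be wrong for IEEE -0.0, which compares
   equal to 0.0 yet has its sign bit set; hence the HONOR_SIGNED_ZEROS
   guard above.  */
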
8770 /* Fold function call to builtin copysign, copysignf or copysignl with
8771 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8772 be made. */
8773
8774 static tree
8775 fold_builtin_copysign (location_t loc, tree fndecl,
8776 tree arg1, tree arg2, tree type)
8777 {
8778 tree tem;
8779
8780 if (!validate_arg (arg1, REAL_TYPE)
8781 || !validate_arg (arg2, REAL_TYPE))
8782 return NULL_TREE;
8783
8784 /* copysign(X,X) is X. */
8785 if (operand_equal_p (arg1, arg2, 0))
8786 return fold_convert_loc (loc, type, arg1);
8787
8788 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8789 if (TREE_CODE (arg1) == REAL_CST
8790 && TREE_CODE (arg2) == REAL_CST
8791 && !TREE_OVERFLOW (arg1)
8792 && !TREE_OVERFLOW (arg2))
8793 {
8794 REAL_VALUE_TYPE c1, c2;
8795
8796 c1 = TREE_REAL_CST (arg1);
8797 c2 = TREE_REAL_CST (arg2);
8798 /* c1.sign := c2.sign. */
8799 real_copysign (&c1, &c2);
8800 return build_real (type, c1);
8801 }
8802
8803 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8804 Remember to evaluate Y for side-effects. */
8805 if (tree_expr_nonnegative_p (arg2))
8806 return omit_one_operand_loc (loc, type,
8807 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8808 arg2);
8809
8810 /* Strip sign changing operations for the first argument. */
8811 tem = fold_strip_sign_ops (arg1);
8812 if (tem)
8813 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8814
8815 return NULL_TREE;
8816 }
8817
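/* Editorial example (illustrative sketch): the constant and
   nonnegative-Y cases above correspond to

     double a = __builtin_copysign (3.0, -0.0);   folds to -3.0
     double b = __builtin_copysign (x, 2.0);      becomes fabs (x)

   copysign inspects only the sign bit of Y, so -0.0 counts as negative
   even though it compares equal to 0.0.  */
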
8818 /* Fold a call to builtin isascii with argument ARG. */
8819
8820 static tree
8821 fold_builtin_isascii (location_t loc, tree arg)
8822 {
8823 if (!validate_arg (arg, INTEGER_TYPE))
8824 return NULL_TREE;
8825 else
8826 {
8827 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8828 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8829 build_int_cst (integer_type_node,
8830 ~ (unsigned HOST_WIDE_INT) 0x7f));
8831 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8832 arg, integer_zero_node);
8833 }
8834 }
8835
8836 /* Fold a call to builtin toascii with argument ARG. */
8837
8838 static tree
8839 fold_builtin_toascii (location_t loc, tree arg)
8840 {
8841 if (!validate_arg (arg, INTEGER_TYPE))
8842 return NULL_TREE;
8843
8844 /* Transform toascii(c) -> (c & 0x7f). */
8845 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8846 build_int_cst (integer_type_node, 0x7f));
8847 }
8848
8849 /* Fold a call to builtin isdigit with argument ARG. */
8850
8851 static tree
8852 fold_builtin_isdigit (location_t loc, tree arg)
8853 {
8854 if (!validate_arg (arg, INTEGER_TYPE))
8855 return NULL_TREE;
8856 else
8857 {
8858 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8859 /* According to the C standard, isdigit is unaffected by locale.
8860 However, it definitely is affected by the target character set. */
8861 unsigned HOST_WIDE_INT target_digit0
8862 = lang_hooks.to_target_charset ('0');
8863
8864 if (target_digit0 == 0)
8865 return NULL_TREE;
8866
8867 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8868 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8869 build_int_cst (unsigned_type_node, target_digit0));
8870 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8871 build_int_cst (unsigned_type_node, 9));
8872 }
8873 }
8874
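/* Editorial example (illustrative sketch): the three ctype folds above
   reduce to plain integer arithmetic on an int c:

     isascii (c)  ->  (c & ~0x7f) == 0     true iff 0 <= c <= 127
     toascii (c)  ->  c & 0x7f
     isdigit (c)  ->  (unsigned) c - '0' <= 9

   The isdigit form relies on unsigned wraparound: for c < '0' the
   subtraction wraps to a huge value, so one comparison checks both
   bounds.  */
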
8875 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8876
8877 static tree
8878 fold_builtin_fabs (location_t loc, tree arg, tree type)
8879 {
8880 if (!validate_arg (arg, REAL_TYPE))
8881 return NULL_TREE;
8882
8883 arg = fold_convert_loc (loc, type, arg);
8884 if (TREE_CODE (arg) == REAL_CST)
8885 return fold_abs_const (arg, type);
8886 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8887 }
8888
8889 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8890
8891 static tree
8892 fold_builtin_abs (location_t loc, tree arg, tree type)
8893 {
8894 if (!validate_arg (arg, INTEGER_TYPE))
8895 return NULL_TREE;
8896
8897 arg = fold_convert_loc (loc, type, arg);
8898 if (TREE_CODE (arg) == INTEGER_CST)
8899 return fold_abs_const (arg, type);
8900 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8901 }
8902
8903 /* Fold a fma operation with arguments ARG[012]. */
8904
8905 tree
8906 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8907 tree type, tree arg0, tree arg1, tree arg2)
8908 {
8909 if (TREE_CODE (arg0) == REAL_CST
8910 && TREE_CODE (arg1) == REAL_CST
8911 && TREE_CODE (arg2) == REAL_CST)
8912 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8913
8914 return NULL_TREE;
8915 }
8916
8917 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8918
8919 static tree
8920 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8921 {
8922 if (validate_arg (arg0, REAL_TYPE)
8923 && validate_arg (arg1, REAL_TYPE)
8924 && validate_arg (arg2, REAL_TYPE))
8925 {
8926 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8927 if (tem)
8928 return tem;
8929
8930 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8931 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8932 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8933 }
8934 return NULL_TREE;
8935 }
8936
8937 /* Fold a call to builtin fmin or fmax. */
8938
8939 static tree
8940 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8941 tree type, bool max)
8942 {
8943 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8944 {
8945 /* Calculate the result when the argument is a constant. */
8946 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8947
8948 if (res)
8949 return res;
8950
8951 /* If either argument is NaN, return the other one. Avoid the
8952 transformation if we get (and honor) a signalling NaN. Using
8953 omit_one_operand() ensures we create a non-lvalue. */
8954 if (TREE_CODE (arg0) == REAL_CST
8955 && real_isnan (&TREE_REAL_CST (arg0))
8956 && (! HONOR_SNANS (arg0)
8957 || ! TREE_REAL_CST (arg0).signalling))
8958 return omit_one_operand_loc (loc, type, arg1, arg0);
8959 if (TREE_CODE (arg1) == REAL_CST
8960 && real_isnan (&TREE_REAL_CST (arg1))
8961 && (! HONOR_SNANS (arg1)
8962 || ! TREE_REAL_CST (arg1).signalling))
8963 return omit_one_operand_loc (loc, type, arg0, arg1);
8964
8965 /* Transform fmin/fmax(x,x) -> x. */
8966 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8967 return omit_one_operand_loc (loc, type, arg0, arg1);
8968
8969 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8970 functions to return the numeric arg if the other one is NaN.
8971 These tree codes don't honor that, so only transform if
8972 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8973 handled, so we don't have to worry about it either. */
8974 if (flag_finite_math_only)
8975 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8976 fold_convert_loc (loc, type, arg0),
8977 fold_convert_loc (loc, type, arg1));
8978 }
8979 return NULL_TREE;
8980 }
8981
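/* Editorial example (illustrative sketch): the NaN handling above
   matches the C99 requirement that fmin/fmax return the numeric
   operand:

     double a = __builtin_fmin (__builtin_nan (""), 2.0);   folds to 2.0
     double b = __builtin_fmax (x, x);                      folds to x

   MIN_EXPR/MAX_EXPR may return either operand when one is NaN, which
   is why the final transformation is gated on -ffinite-math-only.  */
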
8982 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8983
8984 static tree
8985 fold_builtin_carg (location_t loc, tree arg, tree type)
8986 {
8987 if (validate_arg (arg, COMPLEX_TYPE)
8988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8989 {
8990 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8991
8992 if (atan2_fn)
8993 {
8994 tree new_arg = builtin_save_expr (arg);
8995 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8996 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8997 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8998 }
8999 }
9000
9001 return NULL_TREE;
9002 }
9003
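/* Editorial example (illustrative sketch): for a complex z the fold
   above rewrites

     double t = __builtin_carg (z);

   as the GNU C equivalent of

     double t = atan2 (__imag__ z, __real__ z);

   with z wrapped in a save_expr so a side-effecting argument is
   evaluated only once before both parts are extracted.  */
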
9004 /* Fold a call to builtin logb/ilogb. */
9005
9006 static tree
9007 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9008 {
9009 if (! validate_arg (arg, REAL_TYPE))
9010 return NULL_TREE;
9011
9012 STRIP_NOPS (arg);
9013
9014 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9015 {
9016 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9017
9018 switch (value->cl)
9019 {
9020 case rvc_nan:
9021 case rvc_inf:
9022 /* If arg is Inf or NaN and we're logb, return it. */
9023 if (TREE_CODE (rettype) == REAL_TYPE)
9024 {
9025 /* For logb(-Inf) we have to return +Inf. */
9026 if (real_isinf (value) && real_isneg (value))
9027 {
9028 REAL_VALUE_TYPE tem;
9029 real_inf (&tem);
9030 return build_real (rettype, tem);
9031 }
9032 return fold_convert_loc (loc, rettype, arg);
9033 }
9034 /* Fall through... */
9035 case rvc_zero:
9036 /* Zero may set errno and/or raise an exception for logb; also,
9037 for ilogb we don't know FP_ILOGB0. */
9038 return NULL_TREE;
9039 case rvc_normal:
9040 /* For normal numbers, proceed iff radix == 2. In GCC,
9041 normalized significands are in the range [0.5, 1.0). We
9042 want the exponent as if they were [1.0, 2.0) so get the
9043 exponent and subtract 1. */
9044 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9045 return fold_convert_loc (loc, rettype,
9046 build_int_cst (integer_type_node,
9047 REAL_EXP (value)-1));
9048 break;
9049 }
9050 }
9051
9052 return NULL_TREE;
9053 }
9054
9055 /* Fold a call to builtin significand, if radix == 2. */
9056
9057 static tree
9058 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9059 {
9060 if (! validate_arg (arg, REAL_TYPE))
9061 return NULL_TREE;
9062
9063 STRIP_NOPS (arg);
9064
9065 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9066 {
9067 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9068
9069 switch (value->cl)
9070 {
9071 case rvc_zero:
9072 case rvc_nan:
9073 case rvc_inf:
9074 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9075 return fold_convert_loc (loc, rettype, arg);
9076 case rvc_normal:
9077 /* For normal numbers, proceed iff radix == 2. */
9078 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9079 {
9080 REAL_VALUE_TYPE result = *value;
9081 /* In GCC, normalized significands are in the range [0.5,
9082 1.0). We want them to be [1.0, 2.0) so set the
9083 exponent to 1. */
9084 SET_REAL_EXP (&result, 1);
9085 return build_real (rettype, result);
9086 }
9087 break;
9088 }
9089 }
9090
9091 return NULL_TREE;
9092 }
9093
9094 /* Fold a call to builtin frexp. We can assume the base is 2. */
9095
9096 static tree
9097 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9098 {
9099 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9100 return NULL_TREE;
9101
9102 STRIP_NOPS (arg0);
9103
9104 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9105 return NULL_TREE;
9106
9107 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9108
9109 /* Proceed if a valid pointer type was passed in. */
9110 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9111 {
9112 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9113 tree frac, exp;
9114
9115 switch (value->cl)
9116 {
9117 case rvc_zero:
9118 /* For +-0, return (*exp = 0, +-0). */
9119 exp = integer_zero_node;
9120 frac = arg0;
9121 break;
9122 case rvc_nan:
9123 case rvc_inf:
9124 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9125 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9126 case rvc_normal:
9127 {
9128 /* Since the frexp function always expects base 2, and in
9129 GCC normalized significands are already in the range
9130 [0.5, 1.0), we have exactly what frexp wants. */
9131 REAL_VALUE_TYPE frac_rvt = *value;
9132 SET_REAL_EXP (&frac_rvt, 0);
9133 frac = build_real (rettype, frac_rvt);
9134 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9135 }
9136 break;
9137 default:
9138 gcc_unreachable ();
9139 }
9140
9141 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9142 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9143 TREE_SIDE_EFFECTS (arg1) = 1;
9144 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9145 }
9146
9147 return NULL_TREE;
9148 }
9149
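/* Editorial example (illustrative sketch): for a constant argument the
   fold above yields a compound expression that stores the exponent and
   produces the fraction:

     int e;
     double f = __builtin_frexp (8.0, &e);

   becomes roughly (*&e = 4, 0.5).  GCC keeps significands in
   [0.5, 1.0), and 8.0 == 0.5 * 2**4, so f is 0.5 and e is 4.  */
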
9150 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9151 then we can assume the base is two. If it's false, then we have to
9152 check the mode of the TYPE parameter in certain cases. */
9153
9154 static tree
9155 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9156 tree type, bool ldexp)
9157 {
9158 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9159 {
9160 STRIP_NOPS (arg0);
9161 STRIP_NOPS (arg1);
9162
9163 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9164 if (real_zerop (arg0) || integer_zerop (arg1)
9165 || (TREE_CODE (arg0) == REAL_CST
9166 && !real_isfinite (&TREE_REAL_CST (arg0))))
9167 return omit_one_operand_loc (loc, type, arg0, arg1);
9168
9169 /* If both arguments are constant, then try to evaluate it. */
9170 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9171 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9172 && tree_fits_shwi_p (arg1))
9173 {
9174 /* Bound the maximum adjustment to twice the range of the
9175 mode's valid exponents. Use abs to ensure the range is
9176 positive as a sanity check. */
9177 const long max_exp_adj = 2 *
9178 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9179 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9180
9181 /* Get the user-requested adjustment. */
9182 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9183
9184 /* The requested adjustment must be inside this range. This
9185 is a preliminary cap to avoid things like overflow; we
9186 may still fail to compute the result for other reasons. */
9187 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9188 {
9189 REAL_VALUE_TYPE initial_result;
9190
9191 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9192
9193 /* Ensure we didn't overflow. */
9194 if (! real_isinf (&initial_result))
9195 {
9196 const REAL_VALUE_TYPE trunc_result
9197 = real_value_truncate (TYPE_MODE (type), initial_result);
9198
9199 /* Only proceed if the target mode can hold the
9200 resulting value. */
9201 if (real_equal (&initial_result, &trunc_result))
9202 return build_real (type, trunc_result);
9203 }
9204 }
9205 }
9206 }
9207
9208 return NULL_TREE;
9209 }
9210
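/* Editorial example (illustrative sketch): constant folding of the
   exponent adjustment, subject to the max_exp_adj bound above:

     double a = __builtin_ldexp (1.5, 3);   folds to 12.0
     double b = __builtin_scalbn (x, 0);    folds to x

   since 1.5 * 2**3 == 12.0; the Inf and truncation checks ensure the
   folded constant is exactly representable in the target mode.  */
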
9211 /* Fold a call to builtin modf. */
9212
9213 static tree
9214 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9215 {
9216 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9217 return NULL_TREE;
9218
9219 STRIP_NOPS (arg0);
9220
9221 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9222 return NULL_TREE;
9223
9224 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9225
9226 /* Proceed if a valid pointer type was passed in. */
9227 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9228 {
9229 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9230 REAL_VALUE_TYPE trunc, frac;
9231
9232 switch (value->cl)
9233 {
9234 case rvc_nan:
9235 case rvc_zero:
9236 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9237 trunc = frac = *value;
9238 break;
9239 case rvc_inf:
9240 /* For +-Inf, return (*arg1 = arg0, +-0). */
9241 frac = dconst0;
9242 frac.sign = value->sign;
9243 trunc = *value;
9244 break;
9245 case rvc_normal:
9246 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9247 real_trunc (&trunc, VOIDmode, value);
9248 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9249 /* If the original number was negative and already
9250 integral, then the fractional part is -0.0. */
9251 if (value->sign && frac.cl == rvc_zero)
9252 frac.sign = value->sign;
9253 break;
9254 }
9255
9256 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9257 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9258 build_real (rettype, trunc));
9259 TREE_SIDE_EFFECTS (arg1) = 1;
9260 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9261 build_real (rettype, frac));
9262 }
9263
9264 return NULL_TREE;
9265 }
9266
9267 /* Given a location LOC, an interclass builtin function decl FNDECL
9268 and its single argument ARG, return a folded expression computing
9269 the same, or NULL_TREE if we either couldn't or didn't want to fold
9270 (the latter happens if there's an RTL instruction available). */
9271
9272 static tree
9273 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9274 {
9275 machine_mode mode;
9276
9277 if (!validate_arg (arg, REAL_TYPE))
9278 return NULL_TREE;
9279
9280 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9281 return NULL_TREE;
9282
9283 mode = TYPE_MODE (TREE_TYPE (arg));
9284
9285 /* If there is no optab, try generic code. */
9286 switch (DECL_FUNCTION_CODE (fndecl))
9287 {
9288 tree result;
9289
9290 CASE_FLT_FN (BUILT_IN_ISINF):
9291 {
9292 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9293 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9294 tree const type = TREE_TYPE (arg);
9295 REAL_VALUE_TYPE r;
9296 char buf[128];
9297
9298 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9299 real_from_string (&r, buf);
9300 result = build_call_expr (isgr_fn, 2,
9301 fold_build1_loc (loc, ABS_EXPR, type, arg),
9302 build_real (type, r));
9303 return result;
9304 }
9305 CASE_FLT_FN (BUILT_IN_FINITE):
9306 case BUILT_IN_ISFINITE:
9307 {
9308 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9309 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9310 tree const type = TREE_TYPE (arg);
9311 REAL_VALUE_TYPE r;
9312 char buf[128];
9313
9314 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9315 real_from_string (&r, buf);
9316 result = build_call_expr (isle_fn, 2,
9317 fold_build1_loc (loc, ABS_EXPR, type, arg),
9318 build_real (type, r));
9319 /*result = fold_build2_loc (loc, UNGT_EXPR,
9320 TREE_TYPE (TREE_TYPE (fndecl)),
9321 fold_build1_loc (loc, ABS_EXPR, type, arg),
9322 build_real (type, r));
9323 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9324 TREE_TYPE (TREE_TYPE (fndecl)),
9325 result);*/
9326 return result;
9327 }
9328 case BUILT_IN_ISNORMAL:
9329 {
9330 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9331 islessequal(fabs(x),DBL_MAX). */
9332 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9333 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9334 tree const type = TREE_TYPE (arg);
9335 REAL_VALUE_TYPE rmax, rmin;
9336 char buf[128];
9337
9338 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9339 real_from_string (&rmax, buf);
9340 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9341 real_from_string (&rmin, buf);
9342 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9343 result = build_call_expr (isle_fn, 2, arg,
9344 build_real (type, rmax));
9345 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9346 build_call_expr (isge_fn, 2, arg,
9347 build_real (type, rmin)));
9348 return result;
9349 }
9350 default:
9351 break;
9352 }
9353
9354 return NULL_TREE;
9355 }
9356
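/* Editorial example (illustrative sketch): for double arguments the
   generic expansions above compare against the mode's extremes:

     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), DBL_MIN)

   The isgreater/islessequal forms are quiet comparisons, so no invalid
   exception is raised for NaN operands.  */
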
9357 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9358 ARG is the argument for the call. */
9359
9360 static tree
9361 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9362 {
9363 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9364 REAL_VALUE_TYPE r;
9365
9366 if (!validate_arg (arg, REAL_TYPE))
9367 return NULL_TREE;
9368
9369 switch (builtin_index)
9370 {
9371 case BUILT_IN_ISINF:
9372 if (!HONOR_INFINITIES (arg))
9373 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9374
9375 if (TREE_CODE (arg) == REAL_CST)
9376 {
9377 r = TREE_REAL_CST (arg);
9378 if (real_isinf (&r))
9379 return real_compare (GT_EXPR, &r, &dconst0)
9380 ? integer_one_node : integer_minus_one_node;
9381 else
9382 return integer_zero_node;
9383 }
9384
9385 return NULL_TREE;
9386
9387 case BUILT_IN_ISINF_SIGN:
9388 {
9389 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9390 /* In a boolean context, GCC will fold the inner COND_EXPR to
9391 1. So e.g. "if (isinf_sign(x))" would be folded to just
9392 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9393 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9394 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9395 tree tmp = NULL_TREE;
9396
9397 arg = builtin_save_expr (arg);
9398
9399 if (signbit_fn && isinf_fn)
9400 {
9401 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9402 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9403
9404 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9405 signbit_call, integer_zero_node);
9406 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9407 isinf_call, integer_zero_node);
9408
9409 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9410 integer_minus_one_node, integer_one_node);
9411 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9412 isinf_call, tmp,
9413 integer_zero_node);
9414 }
9415
9416 return tmp;
9417 }
9418
9419 case BUILT_IN_ISFINITE:
9420 if (!HONOR_NANS (arg)
9421 && !HONOR_INFINITIES (arg))
9422 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9423
9424 if (TREE_CODE (arg) == REAL_CST)
9425 {
9426 r = TREE_REAL_CST (arg);
9427 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9428 }
9429
9430 return NULL_TREE;
9431
9432 case BUILT_IN_ISNAN:
9433 if (!HONOR_NANS (arg))
9434 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9435
9436 if (TREE_CODE (arg) == REAL_CST)
9437 {
9438 r = TREE_REAL_CST (arg);
9439 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9440 }
9441
9442 arg = builtin_save_expr (arg);
9443 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9444
9445 default:
9446 gcc_unreachable ();
9447 }
9448 }
9449
9450 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9451 This builtin will generate code to return the appropriate floating
9452 point classification depending on the value of the floating point
9453 number passed in. The possible return values must be supplied as
9454 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9455 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9456 one floating point argument, which is "type generic". */
9457
9458 static tree
9459 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9460 {
9461 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9462 arg, type, res, tmp;
9463 machine_mode mode;
9464 REAL_VALUE_TYPE r;
9465 char buf[128];
9466
9467 /* Verify the required arguments in the original call. */
9468 if (nargs != 6
9469 || !validate_arg (args[0], INTEGER_TYPE)
9470 || !validate_arg (args[1], INTEGER_TYPE)
9471 || !validate_arg (args[2], INTEGER_TYPE)
9472 || !validate_arg (args[3], INTEGER_TYPE)
9473 || !validate_arg (args[4], INTEGER_TYPE)
9474 || !validate_arg (args[5], REAL_TYPE))
9475 return NULL_TREE;
9476
9477 fp_nan = args[0];
9478 fp_infinite = args[1];
9479 fp_normal = args[2];
9480 fp_subnormal = args[3];
9481 fp_zero = args[4];
9482 arg = args[5];
9483 type = TREE_TYPE (arg);
9484 mode = TYPE_MODE (type);
9485 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9486
9487 /* fpclassify(x) ->
9488 isnan(x) ? FP_NAN :
9489 (fabs(x) == Inf ? FP_INFINITE :
9490 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9491 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9492
9493 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9494 build_real (type, dconst0));
9495 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9496 tmp, fp_zero, fp_subnormal);
9497
9498 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9499 real_from_string (&r, buf);
9500 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9501 arg, build_real (type, r));
9502 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9503
9504 if (HONOR_INFINITIES (mode))
9505 {
9506 real_inf (&r);
9507 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9508 build_real (type, r));
9509 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9510 fp_infinite, res);
9511 }
9512
9513 if (HONOR_NANS (mode))
9514 {
9515 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9516 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9517 }
9518
9519 return res;
9520 }
9521
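/* Editorial example (illustrative sketch): with y = fabs (x) saved
   once, the nested COND_EXPRs built above amount to

     y != y          ? FP_NAN
     : y == INFINITY ? FP_INFINITE
     : y >= DBL_MIN  ? FP_NORMAL
     : y == 0.0      ? FP_ZERO
     :                 FP_SUBNORMAL

   reading from the outermost (NaN) check inwards; DBL_MIN here stands
   for the smallest normal value of the argument's mode.  */
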
9522 /* Fold a call to an unordered comparison function such as
9523 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9524 being called and ARG0 and ARG1 are the arguments for the call.
9525 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9526 the opposite of the desired result. UNORDERED_CODE is used
9527 for modes that can hold NaNs and ORDERED_CODE is used for
9528 the rest. */
9529
9530 static tree
9531 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9532 enum tree_code unordered_code,
9533 enum tree_code ordered_code)
9534 {
9535 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9536 enum tree_code code;
9537 tree type0, type1;
9538 enum tree_code code0, code1;
9539 tree cmp_type = NULL_TREE;
9540
9541 type0 = TREE_TYPE (arg0);
9542 type1 = TREE_TYPE (arg1);
9543
9544 code0 = TREE_CODE (type0);
9545 code1 = TREE_CODE (type1);
9546
9547 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9548 /* Choose the wider of two real types. */
9549 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9550 ? type0 : type1;
9551 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9552 cmp_type = type0;
9553 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9554 cmp_type = type1;
9555
9556 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9557 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9558
9559 if (unordered_code == UNORDERED_EXPR)
9560 {
9561 if (!HONOR_NANS (arg0))
9562 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9563 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9564 }
9565
9566 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9567 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9568 fold_build2_loc (loc, code, type, arg0, arg1));
9569 }
9570
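/* Editorial example (illustrative sketch): passing the inverted codes
   lets a single negation express the required NaN behavior, e.g.

     isgreater (x, y)  ->  !(x UNLE y)

   which is false whenever either operand is NaN, exactly as C99
   requires, and raises no invalid exception.  If the operands' mode
   cannot hold NaNs, the plain ordered form !(x <= y) is used.  */
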
9571 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9572 arithmetic if it can never overflow, or into internal functions that
9573 return both the result of the arithmetic and an overflow boolean flag in
9574 a complex integer result, or into some other check for overflow. */
9575
9576 static tree
9577 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9578 tree arg0, tree arg1, tree arg2)
9579 {
9580 enum internal_fn ifn = IFN_LAST;
9581 tree type = TREE_TYPE (TREE_TYPE (arg2));
9582 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9583 switch (fcode)
9584 {
9585 case BUILT_IN_ADD_OVERFLOW:
9586 case BUILT_IN_SADD_OVERFLOW:
9587 case BUILT_IN_SADDL_OVERFLOW:
9588 case BUILT_IN_SADDLL_OVERFLOW:
9589 case BUILT_IN_UADD_OVERFLOW:
9590 case BUILT_IN_UADDL_OVERFLOW:
9591 case BUILT_IN_UADDLL_OVERFLOW:
9592 ifn = IFN_ADD_OVERFLOW;
9593 break;
9594 case BUILT_IN_SUB_OVERFLOW:
9595 case BUILT_IN_SSUB_OVERFLOW:
9596 case BUILT_IN_SSUBL_OVERFLOW:
9597 case BUILT_IN_SSUBLL_OVERFLOW:
9598 case BUILT_IN_USUB_OVERFLOW:
9599 case BUILT_IN_USUBL_OVERFLOW:
9600 case BUILT_IN_USUBLL_OVERFLOW:
9601 ifn = IFN_SUB_OVERFLOW;
9602 break;
9603 case BUILT_IN_MUL_OVERFLOW:
9604 case BUILT_IN_SMUL_OVERFLOW:
9605 case BUILT_IN_SMULL_OVERFLOW:
9606 case BUILT_IN_SMULLL_OVERFLOW:
9607 case BUILT_IN_UMUL_OVERFLOW:
9608 case BUILT_IN_UMULL_OVERFLOW:
9609 case BUILT_IN_UMULLL_OVERFLOW:
9610 ifn = IFN_MUL_OVERFLOW;
9611 break;
9612 default:
9613 gcc_unreachable ();
9614 }
9615 tree ctype = build_complex_type (type);
9616 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9617 2, arg0, arg1);
9618 tree tgt = save_expr (call);
9619 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9620 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9621 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9622 tree store
9623 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9624 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9625 }
9626
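/* Editorial example (illustrative sketch): the complex-typed internal
   call built above packs both outputs of the checked operation, so

     int res;
     bool ovf = __builtin_add_overflow (a, b, &res);

   folds to roughly

     c = ADD_OVERFLOW (a, b);        internal function call
     *&res = __real__ c;
     ovf = (bool) __imag__ c;

   where the REALPART_EXPR/IMAGPART_EXPR trees extract the result and
   the overflow flag from the complex temporary.  */
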
9627 /* Fold a call to built-in function FNDECL with 0 arguments.
9628 This function returns NULL_TREE if no simplification was possible. */
9629
9630 static tree
9631 fold_builtin_0 (location_t loc, tree fndecl)
9632 {
9633 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9634 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9635 switch (fcode)
9636 {
9637 CASE_FLT_FN (BUILT_IN_INF):
9638 case BUILT_IN_INFD32:
9639 case BUILT_IN_INFD64:
9640 case BUILT_IN_INFD128:
9641 return fold_builtin_inf (loc, type, true);
9642
9643 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9644 return fold_builtin_inf (loc, type, false);
9645
9646 case BUILT_IN_CLASSIFY_TYPE:
9647 return fold_builtin_classify_type (NULL_TREE);
9648
9649 default:
9650 break;
9651 }
9652 return NULL_TREE;
9653 }
9654
9655 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9656 This function returns NULL_TREE if no simplification was possible. */
9657
9658 static tree
9659 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9660 {
9661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9662 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9663 switch (fcode)
9664 {
9665 case BUILT_IN_CONSTANT_P:
9666 {
9667 tree val = fold_builtin_constant_p (arg0);
9668
9669 /* Gimplification will pull the CALL_EXPR for the builtin out of
9670 an if condition. When not optimizing, we'll not CSE it back.
9671 To avoid link-error regressions, return false now. */
9672 if (!val && !optimize)
9673 val = integer_zero_node;
9674
9675 return val;
9676 }
9677
9678 case BUILT_IN_CLASSIFY_TYPE:
9679 return fold_builtin_classify_type (arg0);
9680
9681 case BUILT_IN_STRLEN:
9682 return fold_builtin_strlen (loc, type, arg0);
9683
9684 CASE_FLT_FN (BUILT_IN_FABS):
9685 case BUILT_IN_FABSD32:
9686 case BUILT_IN_FABSD64:
9687 case BUILT_IN_FABSD128:
9688 return fold_builtin_fabs (loc, arg0, type);
9689
9690 case BUILT_IN_ABS:
9691 case BUILT_IN_LABS:
9692 case BUILT_IN_LLABS:
9693 case BUILT_IN_IMAXABS:
9694 return fold_builtin_abs (loc, arg0, type);
9695
9696 CASE_FLT_FN (BUILT_IN_CONJ):
9697 if (validate_arg (arg0, COMPLEX_TYPE)
9698 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9699 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9700 break;
9701
9702 CASE_FLT_FN (BUILT_IN_CREAL):
9703 if (validate_arg (arg0, COMPLEX_TYPE)
9704 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9705 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9706 break;
9707
9708 CASE_FLT_FN (BUILT_IN_CIMAG):
9709 if (validate_arg (arg0, COMPLEX_TYPE)
9710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9711 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9712 break;
9713
9714 CASE_FLT_FN (BUILT_IN_CCOS):
9715 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9716
9717 CASE_FLT_FN (BUILT_IN_CCOSH):
9718 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9719
9720 CASE_FLT_FN (BUILT_IN_CPROJ):
9721 return fold_builtin_cproj (loc, arg0, type);
9722
9723 CASE_FLT_FN (BUILT_IN_CSIN):
9724 if (validate_arg (arg0, COMPLEX_TYPE)
9725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9726 return do_mpc_arg1 (arg0, type, mpc_sin);
9727 break;
9728
9729 CASE_FLT_FN (BUILT_IN_CSINH):
9730 if (validate_arg (arg0, COMPLEX_TYPE)
9731 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9732 return do_mpc_arg1 (arg0, type, mpc_sinh);
9733 break;
9734
9735 CASE_FLT_FN (BUILT_IN_CTAN):
9736 if (validate_arg (arg0, COMPLEX_TYPE)
9737 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9738 return do_mpc_arg1 (arg0, type, mpc_tan);
9739 break;
9740
9741 CASE_FLT_FN (BUILT_IN_CTANH):
9742 if (validate_arg (arg0, COMPLEX_TYPE)
9743 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9744 return do_mpc_arg1 (arg0, type, mpc_tanh);
9745 break;
9746
9747 CASE_FLT_FN (BUILT_IN_CLOG):
9748 if (validate_arg (arg0, COMPLEX_TYPE)
9749 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9750 return do_mpc_arg1 (arg0, type, mpc_log);
9751 break;
9752
9753 CASE_FLT_FN (BUILT_IN_CSQRT):
9754 if (validate_arg (arg0, COMPLEX_TYPE)
9755 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9756 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9757 break;
9758
9759 CASE_FLT_FN (BUILT_IN_CASIN):
9760 if (validate_arg (arg0, COMPLEX_TYPE)
9761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9762 return do_mpc_arg1 (arg0, type, mpc_asin);
9763 break;
9764
9765 CASE_FLT_FN (BUILT_IN_CACOS):
9766 if (validate_arg (arg0, COMPLEX_TYPE)
9767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9768 return do_mpc_arg1 (arg0, type, mpc_acos);
9769 break;
9770
9771 CASE_FLT_FN (BUILT_IN_CATAN):
9772 if (validate_arg (arg0, COMPLEX_TYPE)
9773 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9774 return do_mpc_arg1 (arg0, type, mpc_atan);
9775 break;
9776
9777 CASE_FLT_FN (BUILT_IN_CASINH):
9778 if (validate_arg (arg0, COMPLEX_TYPE)
9779 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9780 return do_mpc_arg1 (arg0, type, mpc_asinh);
9781 break;
9782
9783 CASE_FLT_FN (BUILT_IN_CACOSH):
9784 if (validate_arg (arg0, COMPLEX_TYPE)
9785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9786 return do_mpc_arg1 (arg0, type, mpc_acosh);
9787 break;
9788
9789 CASE_FLT_FN (BUILT_IN_CATANH):
9790 if (validate_arg (arg0, COMPLEX_TYPE)
9791 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9792 return do_mpc_arg1 (arg0, type, mpc_atanh);
9793 break;
9794
9795 CASE_FLT_FN (BUILT_IN_CABS):
9796 return fold_builtin_cabs (loc, arg0, type, fndecl);
9797
9798 CASE_FLT_FN (BUILT_IN_CARG):
9799 return fold_builtin_carg (loc, arg0, type);
9800
9801 CASE_FLT_FN (BUILT_IN_SQRT):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9804 break;
9805
9806 CASE_FLT_FN (BUILT_IN_CBRT):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9809 break;
9810
9811 CASE_FLT_FN (BUILT_IN_ASIN):
9812 if (validate_arg (arg0, REAL_TYPE))
9813 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9814 &dconstm1, &dconst1, true);
9815 break;
9816
9817 CASE_FLT_FN (BUILT_IN_ACOS):
9818 if (validate_arg (arg0, REAL_TYPE))
9819 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9820 &dconstm1, &dconst1, true);
9821 break;
9822
9823 CASE_FLT_FN (BUILT_IN_ATAN):
9824 if (validate_arg (arg0, REAL_TYPE))
9825 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9826 break;
9827
9828 CASE_FLT_FN (BUILT_IN_ASINH):
9829 if (validate_arg (arg0, REAL_TYPE))
9830 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9831 break;
9832
9833 CASE_FLT_FN (BUILT_IN_ACOSH):
9834 if (validate_arg (arg0, REAL_TYPE))
9835 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9836 &dconst1, NULL, true);
9837 break;
9838
9839 CASE_FLT_FN (BUILT_IN_ATANH):
9840 if (validate_arg (arg0, REAL_TYPE))
9841 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9842 &dconstm1, &dconst1, false);
9843 break;
9844
9845 CASE_FLT_FN (BUILT_IN_SIN):
9846 if (validate_arg (arg0, REAL_TYPE))
9847 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9848 break;
9849
9850 CASE_FLT_FN (BUILT_IN_COS):
9851 return fold_builtin_cos (loc, arg0, type, fndecl);
9852
9853 CASE_FLT_FN (BUILT_IN_TAN):
9854 return fold_builtin_tan (arg0, type);
9855
9856 CASE_FLT_FN (BUILT_IN_CEXP):
9857 return fold_builtin_cexp (loc, arg0, type);
9858
9859 CASE_FLT_FN (BUILT_IN_CEXPI):
9860 if (validate_arg (arg0, REAL_TYPE))
9861 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9862 break;
9863
9864 CASE_FLT_FN (BUILT_IN_SINH):
9865 if (validate_arg (arg0, REAL_TYPE))
9866 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9867 break;
9868
9869 CASE_FLT_FN (BUILT_IN_COSH):
9870 return fold_builtin_cosh (loc, arg0, type, fndecl);
9871
9872 CASE_FLT_FN (BUILT_IN_TANH):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9875 break;
9876
9877 CASE_FLT_FN (BUILT_IN_ERF):
9878 if (validate_arg (arg0, REAL_TYPE))
9879 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9880 break;
9881
9882 CASE_FLT_FN (BUILT_IN_ERFC):
9883 if (validate_arg (arg0, REAL_TYPE))
9884 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9885 break;
9886
9887 CASE_FLT_FN (BUILT_IN_TGAMMA):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9890 break;
9891
9892 CASE_FLT_FN (BUILT_IN_EXP):
9893 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9894
9895 CASE_FLT_FN (BUILT_IN_EXP2):
9896 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9897
9898 CASE_FLT_FN (BUILT_IN_EXP10):
9899 CASE_FLT_FN (BUILT_IN_POW10):
9900 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9901
9902 CASE_FLT_FN (BUILT_IN_EXPM1):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9905 break;
9906
9907 CASE_FLT_FN (BUILT_IN_LOG):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9910 break;
9911
9912 CASE_FLT_FN (BUILT_IN_LOG2):
9913 if (validate_arg (arg0, REAL_TYPE))
9914 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_LOG10):
9918 if (validate_arg (arg0, REAL_TYPE))
9919 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9920 break;
9921
9922 CASE_FLT_FN (BUILT_IN_LOG1P):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9925 &dconstm1, NULL, false);
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_J0):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9931 NULL, NULL, 0);
9932 break;
9933
9934 CASE_FLT_FN (BUILT_IN_J1):
9935 if (validate_arg (arg0, REAL_TYPE))
9936 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9937 NULL, NULL, 0);
9938 break;
9939
9940 CASE_FLT_FN (BUILT_IN_Y0):
9941 if (validate_arg (arg0, REAL_TYPE))
9942 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9943 &dconst0, NULL, false);
9944 break;
9945
9946 CASE_FLT_FN (BUILT_IN_Y1):
9947 if (validate_arg (arg0, REAL_TYPE))
9948 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9949 &dconst0, NULL, false);
9950 break;
9951
9952 CASE_FLT_FN (BUILT_IN_NAN):
9953 case BUILT_IN_NAND32:
9954 case BUILT_IN_NAND64:
9955 case BUILT_IN_NAND128:
9956 return fold_builtin_nan (arg0, type, true);
9957
9958 CASE_FLT_FN (BUILT_IN_NANS):
9959 return fold_builtin_nan (arg0, type, false);
9960
9961 CASE_FLT_FN (BUILT_IN_FLOOR):
9962 return fold_builtin_floor (loc, fndecl, arg0);
9963
9964 CASE_FLT_FN (BUILT_IN_CEIL):
9965 return fold_builtin_ceil (loc, fndecl, arg0);
9966
9967 CASE_FLT_FN (BUILT_IN_TRUNC):
9968 return fold_builtin_trunc (loc, fndecl, arg0);
9969
9970 CASE_FLT_FN (BUILT_IN_ROUND):
9971 return fold_builtin_round (loc, fndecl, arg0);
9972
9973 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9974 CASE_FLT_FN (BUILT_IN_RINT):
9975 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9976
9977 CASE_FLT_FN (BUILT_IN_ICEIL):
9978 CASE_FLT_FN (BUILT_IN_LCEIL):
9979 CASE_FLT_FN (BUILT_IN_LLCEIL):
9980 CASE_FLT_FN (BUILT_IN_LFLOOR):
9981 CASE_FLT_FN (BUILT_IN_IFLOOR):
9982 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9983 CASE_FLT_FN (BUILT_IN_IROUND):
9984 CASE_FLT_FN (BUILT_IN_LROUND):
9985 CASE_FLT_FN (BUILT_IN_LLROUND):
9986 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9987
9988 CASE_FLT_FN (BUILT_IN_IRINT):
9989 CASE_FLT_FN (BUILT_IN_LRINT):
9990 CASE_FLT_FN (BUILT_IN_LLRINT):
9991 return fold_fixed_mathfn (loc, fndecl, arg0);
9992
9993 case BUILT_IN_BSWAP16:
9994 case BUILT_IN_BSWAP32:
9995 case BUILT_IN_BSWAP64:
9996 return fold_builtin_bswap (fndecl, arg0);
9997
9998 CASE_INT_FN (BUILT_IN_FFS):
9999 CASE_INT_FN (BUILT_IN_CLZ):
10000 CASE_INT_FN (BUILT_IN_CTZ):
10001 CASE_INT_FN (BUILT_IN_CLRSB):
10002 CASE_INT_FN (BUILT_IN_POPCOUNT):
10003 CASE_INT_FN (BUILT_IN_PARITY):
10004 return fold_builtin_bitop (fndecl, arg0);
10005
10006 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10007 return fold_builtin_signbit (loc, arg0, type);
10008
10009 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10010 return fold_builtin_significand (loc, arg0, type);
10011
10012 CASE_FLT_FN (BUILT_IN_ILOGB):
10013 CASE_FLT_FN (BUILT_IN_LOGB):
10014 return fold_builtin_logb (loc, arg0, type);
10015
10016 case BUILT_IN_ISASCII:
10017 return fold_builtin_isascii (loc, arg0);
10018
10019 case BUILT_IN_TOASCII:
10020 return fold_builtin_toascii (loc, arg0);
10021
10022 case BUILT_IN_ISDIGIT:
10023 return fold_builtin_isdigit (loc, arg0);
10024
10025 CASE_FLT_FN (BUILT_IN_FINITE):
10026 case BUILT_IN_FINITED32:
10027 case BUILT_IN_FINITED64:
10028 case BUILT_IN_FINITED128:
10029 case BUILT_IN_ISFINITE:
10030 {
10031 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10032 if (ret)
10033 return ret;
10034 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10035 }
10036
10037 CASE_FLT_FN (BUILT_IN_ISINF):
10038 case BUILT_IN_ISINFD32:
10039 case BUILT_IN_ISINFD64:
10040 case BUILT_IN_ISINFD128:
10041 {
10042 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10043 if (ret)
10044 return ret;
10045 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10046 }
10047
10048 case BUILT_IN_ISNORMAL:
10049 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10050
10051 case BUILT_IN_ISINF_SIGN:
10052 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10053
10054 CASE_FLT_FN (BUILT_IN_ISNAN):
10055 case BUILT_IN_ISNAND32:
10056 case BUILT_IN_ISNAND64:
10057 case BUILT_IN_ISNAND128:
10058 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10059
10060 case BUILT_IN_FREE:
10061 if (integer_zerop (arg0))
10062 return build_empty_stmt (loc);
10063 break;
10064
10065 default:
10066 break;
10067 }
10068
10069 return NULL_TREE;
10070
10071 }
10072
10073 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10074 This function returns NULL_TREE if no simplification was possible. */
10075
10076 static tree
10077 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10078 {
10079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10081
10082 switch (fcode)
10083 {
10084 CASE_FLT_FN (BUILT_IN_JN):
10085 if (validate_arg (arg0, INTEGER_TYPE)
10086 && validate_arg (arg1, REAL_TYPE))
10087 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10088 break;
10089
10090 CASE_FLT_FN (BUILT_IN_YN):
10091 if (validate_arg (arg0, INTEGER_TYPE)
10092 && validate_arg (arg1, REAL_TYPE))
10093 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10094 &dconst0, false);
10095 break;
10096
10097 CASE_FLT_FN (BUILT_IN_DREM):
10098 CASE_FLT_FN (BUILT_IN_REMAINDER):
10099 if (validate_arg (arg0, REAL_TYPE)
10100 && validate_arg (arg1, REAL_TYPE))
10101 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10102 break;
10103
10104 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10105 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10106 if (validate_arg (arg0, REAL_TYPE)
10107 && validate_arg (arg1, POINTER_TYPE))
10108 return do_mpfr_lgamma_r (arg0, arg1, type);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_ATAN2):
10112 if (validate_arg (arg0, REAL_TYPE)
10113 && validate_arg (arg1, REAL_TYPE))
10114 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10115 break;
10116
10117 CASE_FLT_FN (BUILT_IN_FDIM):
10118 if (validate_arg (arg0, REAL_TYPE)
10119 && validate_arg (arg1, REAL_TYPE))
10120 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_HYPOT):
10124 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10125
10126 CASE_FLT_FN (BUILT_IN_CPOW):
10127 if (validate_arg (arg0, COMPLEX_TYPE)
10128 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10129 && validate_arg (arg1, COMPLEX_TYPE)
10130 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10131 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10132 break;
10133
10134 CASE_FLT_FN (BUILT_IN_LDEXP):
10135 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10136 CASE_FLT_FN (BUILT_IN_SCALBN):
10137 CASE_FLT_FN (BUILT_IN_SCALBLN):
10138 return fold_builtin_load_exponent (loc, arg0, arg1,
10139 type, /*ldexp=*/false);
10140
10141 CASE_FLT_FN (BUILT_IN_FREXP):
10142 return fold_builtin_frexp (loc, arg0, arg1, type);
10143
10144 CASE_FLT_FN (BUILT_IN_MODF):
10145 return fold_builtin_modf (loc, arg0, arg1, type);
10146
10147 case BUILT_IN_STRSTR:
10148 return fold_builtin_strstr (loc, arg0, arg1, type);
10149
10150 case BUILT_IN_STRSPN:
10151 return fold_builtin_strspn (loc, arg0, arg1);
10152
10153 case BUILT_IN_STRCSPN:
10154 return fold_builtin_strcspn (loc, arg0, arg1);
10155
10156 case BUILT_IN_STRCHR:
10157 case BUILT_IN_INDEX:
10158 return fold_builtin_strchr (loc, arg0, arg1, type);
10159
10160 case BUILT_IN_STRRCHR:
10161 case BUILT_IN_RINDEX:
10162 return fold_builtin_strrchr (loc, arg0, arg1, type);
10163
10164 case BUILT_IN_STRCMP:
10165 return fold_builtin_strcmp (loc, arg0, arg1);
10166
10167 case BUILT_IN_STRPBRK:
10168 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10169
10170 case BUILT_IN_EXPECT:
10171 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10172
10173 CASE_FLT_FN (BUILT_IN_POW):
10174 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10175
10176 CASE_FLT_FN (BUILT_IN_POWI):
10177 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10178
10179 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10180 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10181
10182 CASE_FLT_FN (BUILT_IN_FMIN):
10183 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10184
10185 CASE_FLT_FN (BUILT_IN_FMAX):
10186 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10187
10188 case BUILT_IN_ISGREATER:
10189 return fold_builtin_unordered_cmp (loc, fndecl,
10190 arg0, arg1, UNLE_EXPR, LE_EXPR);
10191 case BUILT_IN_ISGREATEREQUAL:
10192 return fold_builtin_unordered_cmp (loc, fndecl,
10193 arg0, arg1, UNLT_EXPR, LT_EXPR);
10194 case BUILT_IN_ISLESS:
10195 return fold_builtin_unordered_cmp (loc, fndecl,
10196 arg0, arg1, UNGE_EXPR, GE_EXPR);
10197 case BUILT_IN_ISLESSEQUAL:
10198 return fold_builtin_unordered_cmp (loc, fndecl,
10199 arg0, arg1, UNGT_EXPR, GT_EXPR);
10200 case BUILT_IN_ISLESSGREATER:
10201 return fold_builtin_unordered_cmp (loc, fndecl,
10202 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10203 case BUILT_IN_ISUNORDERED:
10204 return fold_builtin_unordered_cmp (loc, fndecl,
10205 arg0, arg1, UNORDERED_EXPR,
10206 NOP_EXPR);
10207
10208 /* We do the folding for va_start in the expander. */
10209 case BUILT_IN_VA_START:
10210 break;
10211
10212 case BUILT_IN_OBJECT_SIZE:
10213 return fold_builtin_object_size (arg0, arg1);
10214
10215 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10216 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10217
10218 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10219 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10220
10221 default:
10222 break;
10223 }
10224 return NULL_TREE;
10225 }
10226
10227 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10228 and ARG2.
10229 This function returns NULL_TREE if no simplification was possible. */
10230
10231 static tree
10232 fold_builtin_3 (location_t loc, tree fndecl,
10233 tree arg0, tree arg1, tree arg2)
10234 {
10235 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10237 switch (fcode)
10238 {
10239
10240 CASE_FLT_FN (BUILT_IN_SINCOS):
10241 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10242
10243 CASE_FLT_FN (BUILT_IN_FMA):
10244 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10246
10247 CASE_FLT_FN (BUILT_IN_REMQUO):
10248 if (validate_arg (arg0, REAL_TYPE)
10249 && validate_arg (arg1, REAL_TYPE)
10250 && validate_arg (arg2, POINTER_TYPE))
10251 return do_mpfr_remquo (arg0, arg1, arg2);
10252 break;
10253
10254 case BUILT_IN_STRNCMP:
10255 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10256
10257 case BUILT_IN_MEMCHR:
10258 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10259
10260 case BUILT_IN_BCMP:
10261 case BUILT_IN_MEMCMP:
10262 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10263
10264 case BUILT_IN_EXPECT:
10265 return fold_builtin_expect (loc, arg0, arg1, arg2);
10266
10267 case BUILT_IN_ADD_OVERFLOW:
10268 case BUILT_IN_SUB_OVERFLOW:
10269 case BUILT_IN_MUL_OVERFLOW:
10270 case BUILT_IN_SADD_OVERFLOW:
10271 case BUILT_IN_SADDL_OVERFLOW:
10272 case BUILT_IN_SADDLL_OVERFLOW:
10273 case BUILT_IN_SSUB_OVERFLOW:
10274 case BUILT_IN_SSUBL_OVERFLOW:
10275 case BUILT_IN_SSUBLL_OVERFLOW:
10276 case BUILT_IN_SMUL_OVERFLOW:
10277 case BUILT_IN_SMULL_OVERFLOW:
10278 case BUILT_IN_SMULLL_OVERFLOW:
10279 case BUILT_IN_UADD_OVERFLOW:
10280 case BUILT_IN_UADDL_OVERFLOW:
10281 case BUILT_IN_UADDLL_OVERFLOW:
10282 case BUILT_IN_USUB_OVERFLOW:
10283 case BUILT_IN_USUBL_OVERFLOW:
10284 case BUILT_IN_USUBLL_OVERFLOW:
10285 case BUILT_IN_UMUL_OVERFLOW:
10286 case BUILT_IN_UMULL_OVERFLOW:
10287 case BUILT_IN_UMULLL_OVERFLOW:
10288 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10289
10290 default:
10291 break;
10292 }
10293 return NULL_TREE;
10294 }
10295
10296 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10297 arguments. The trailing bool parameter (historically IGNORE, true
10298 if the result of the call is ignored) is now unused. This function
10299 returns NULL_TREE if no simplification was possible. */
10300
10301 tree
10302 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10303 {
10304 tree ret = NULL_TREE;
10305
10306 switch (nargs)
10307 {
10308 case 0:
10309 ret = fold_builtin_0 (loc, fndecl);
10310 break;
10311 case 1:
10312 ret = fold_builtin_1 (loc, fndecl, args[0]);
10313 break;
10314 case 2:
10315 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10316 break;
10317 case 3:
10318 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10319 break;
10320 default:
10321 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10322 break;
10323 }
10324 if (ret)
10325 {
10326 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10327 SET_EXPR_LOCATION (ret, loc);
10328 TREE_NO_WARNING (ret) = 1;
10329 return ret;
10330 }
10331 return NULL_TREE;
10332 }
10333
10334 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10335 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10336 of arguments in ARGS to be omitted. OLDNARGS is the number of
10337 elements in ARGS. */
10338
10339 static tree
10340 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10341 int skip, tree fndecl, int n, va_list newargs)
10342 {
10343 int nargs = oldnargs - skip + n;
10344 tree *buffer;
10345
10346 if (n > 0)
10347 {
10348 int i, j;
10349
10350 buffer = XALLOCAVEC (tree, nargs);
10351 for (i = 0; i < n; i++)
10352 buffer[i] = va_arg (newargs, tree);
10353 for (j = skip; j < oldnargs; j++, i++)
10354 buffer[i] = args[j];
10355 }
10356 else
10357 buffer = args + skip;
10358
10359 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10360 }
10361
10362 /* Return true if FNDECL shouldn't be folded right now.
10363 If a built-in function has an inline attribute always_inline
10364 wrapper, defer folding it after always_inline functions have
10365 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10366 might not be performed. */
10367
10368 bool
10369 avoid_folding_inline_builtin (tree fndecl)
10370 {
10371 return (DECL_DECLARED_INLINE_P (fndecl)
10372 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10373 && cfun
10374 && !cfun->always_inline_functions_inlined
10375 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10376 }
10377
10378 /* A wrapper function for builtin folding that prevents warnings for
10379 "statement without effect" and the like, caused by removing the
10380 call node earlier than the warning is generated. */
10381
10382 tree
10383 fold_call_expr (location_t loc, tree exp, bool ignore)
10384 {
10385 tree ret = NULL_TREE;
10386 tree fndecl = get_callee_fndecl (exp);
10387 if (fndecl
10388 && TREE_CODE (fndecl) == FUNCTION_DECL
10389 && DECL_BUILT_IN (fndecl)
10390 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10391 yet. Defer folding until we see all the arguments
10392 (after inlining). */
10393 && !CALL_EXPR_VA_ARG_PACK (exp))
10394 {
10395 int nargs = call_expr_nargs (exp);
10396
10397 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set; instead
10398 the last argument is __builtin_va_arg_pack (). Defer folding
10399 even in that case, until the arguments are finalized. */
10400 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10401 {
10402 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10403 if (fndecl2
10404 && TREE_CODE (fndecl2) == FUNCTION_DECL
10405 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10406 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10407 return NULL_TREE;
10408 }
10409
10410 if (avoid_folding_inline_builtin (fndecl))
10411 return NULL_TREE;
10412
10413 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10414 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10415 CALL_EXPR_ARGP (exp), ignore);
10416 else
10417 {
10418 tree *args = CALL_EXPR_ARGP (exp);
10419 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10420 if (ret)
10421 return ret;
10422 }
10423 }
10424 return NULL_TREE;
10425 }
10426
10427 /* Fold a CALL_EXPR with FN as the function expression. N arguments
10428 are passed in the array ARGARRAY; the unnamed type argument is unused.
10429 Return a folded expression or NULL_TREE if no simplification was possible. */
10430
10431 tree
10432 fold_builtin_call_array (location_t loc, tree,
10433 tree fn,
10434 int n,
10435 tree *argarray)
10436 {
10437 if (TREE_CODE (fn) != ADDR_EXPR)
10438 return NULL_TREE;
10439
10440 tree fndecl = TREE_OPERAND (fn, 0);
10441 if (TREE_CODE (fndecl) == FUNCTION_DECL
10442 && DECL_BUILT_IN (fndecl))
10443 {
10444 /* If last argument is __builtin_va_arg_pack (), arguments to this
10445 function are not finalized yet. Defer folding until they are. */
10446 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10447 {
10448 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10449 if (fndecl2
10450 && TREE_CODE (fndecl2) == FUNCTION_DECL
10451 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10452 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10453 return NULL_TREE;
10454 }
10455 if (avoid_folding_inline_builtin (fndecl))
10456 return NULL_TREE;
10457 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10458 return targetm.fold_builtin (fndecl, n, argarray, false);
10459 else
10460 return fold_builtin_n (loc, fndecl, argarray, n, false);
10461 }
10462
10463 return NULL_TREE;
10464 }
10465
10466 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10467 along with N new arguments specified as the "..." parameters. SKIP
10468 is the number of arguments in EXP to be omitted. This function is used
10469 to do varargs-to-varargs transformations. */
10470
10471 static tree
10472 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10473 {
10474 va_list ap;
10475 tree t;
10476
10477 va_start (ap, n);
10478 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10479 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10480 va_end (ap);
10481
10482 return t;
10483 }
10484
10485 /* Validate a single argument ARG against a tree code CODE representing
10486 a type. */
10487
10488 static bool
10489 validate_arg (const_tree arg, enum tree_code code)
10490 {
10491 if (!arg)
10492 return false;
10493 else if (code == POINTER_TYPE)
10494 return POINTER_TYPE_P (TREE_TYPE (arg));
10495 else if (code == INTEGER_TYPE)
10496 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10497 return code == TREE_CODE (TREE_TYPE (arg));
10498 }
10499
10500 /* This function validates the types of a function call argument list
10501 against a specified list of tree_codes. If the last specifier is a 0,
10502 that represents an ellipsis; otherwise the last specifier must be a
10503 VOID_TYPE.
10504
10505 This is the GIMPLE version of validate_arglist. Eventually we want to
10506 completely convert builtins.c to work from GIMPLEs and the tree based
10507 validate_arglist will then be removed. */
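/* Example usage, as a hypothetical caller: a checker for a builtin
   taking exactly a pointer and an integer would be written as

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   Ending the list with 0 instead of VOID_TYPE would additionally
   accept any number of trailing arguments of any type.  */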
10508
10509 bool
10510 validate_gimple_arglist (const gcall *call, ...)
10511 {
10512 enum tree_code code;
10513 bool res = false;
10514 va_list ap;
10515 const_tree arg;
10516 size_t i;
10517
10518 va_start (ap, call);
10519 i = 0;
10520
10521 do
10522 {
10523 code = (enum tree_code) va_arg (ap, int);
10524 switch (code)
10525 {
10526 case 0:
10527 /* This signifies an ellipsis; any further arguments are all ok. */
10528 res = true;
10529 goto end;
10530 case VOID_TYPE:
10531 /* This signifies an endlink: if no arguments remain, return
10532 true; otherwise return false. */
10533 res = (i == gimple_call_num_args (call));
10534 goto end;
10535 default:
10536 /* If no parameters remain or the parameter's code does not
10537 match the specified code, return false. Otherwise continue
10538 checking any remaining arguments. */
10539 arg = gimple_call_arg (call, i++);
10540 if (!validate_arg (arg, code))
10541 goto end;
10542 break;
10543 }
10544 }
10545 while (1);
10546
10547 /* We use gotos here so that every exit path funnels through
10548 the single va_end call below. */
10549 end: ;
10550 va_end (ap);
10551
10552 return res;
10553 }
10554
10555 /* Default target-specific builtin expander that does nothing. */
10556
10557 rtx
10558 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10559 rtx target ATTRIBUTE_UNUSED,
10560 rtx subtarget ATTRIBUTE_UNUSED,
10561 machine_mode mode ATTRIBUTE_UNUSED,
10562 int ignore ATTRIBUTE_UNUSED)
10563 {
10564 return NULL_RTX;
10565 }
10566
10567 /* Returns true if EXP represents data that would potentially reside
10568 in a readonly section. */
10569
10570 bool
10571 readonly_data_expr (tree exp)
10572 {
10573 STRIP_NOPS (exp);
10574
10575 if (TREE_CODE (exp) != ADDR_EXPR)
10576 return false;
10577
10578 exp = get_base_address (TREE_OPERAND (exp, 0));
10579 if (!exp)
10580 return false;
10581
10582 /* Make sure we call decl_readonly_section only for trees it
10583 can handle (since it returns true for everything it doesn't
10584 understand). */
10585 if (TREE_CODE (exp) == STRING_CST
10586 || TREE_CODE (exp) == CONSTRUCTOR
10587 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10588 return decl_readonly_section (exp, 0);
10589 else
10590 return false;
10591 }
10592
10593 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10594 to the call, and TYPE is its return type.
10595
10596 Return NULL_TREE if no simplification was possible, otherwise return the
10597 simplified form of the call as a tree.
10598
10599 The simplified form may be a constant or other expression which
10600 computes the same value, but in a more efficient manner (including
10601 calls to other builtin functions).
10602
10603 The call may contain arguments which need to be evaluated, but
10604 which are not useful to determine the result of the call. In
10605 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10606 COMPOUND_EXPR will be an argument which must be evaluated.
10607 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10608 COMPOUND_EXPR in the chain will contain the tree for the simplified
10609 form of the builtin function call. */
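/* By way of illustration, the transformations performed below are:

     strstr (s, "")         ->  (char *) s
     strstr (s, "c")        ->  strchr (s, 'c')
     strstr ("abcde", "cd") ->  "abcde" + 2    (both strings constant)

   Any other case is left for the library call.  */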
10610
10611 static tree
10612 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10613 {
10614 if (!validate_arg (s1, POINTER_TYPE)
10615 || !validate_arg (s2, POINTER_TYPE))
10616 return NULL_TREE;
10617 else
10618 {
10619 tree fn;
10620 const char *p1, *p2;
10621
10622 p2 = c_getstr (s2);
10623 if (p2 == NULL)
10624 return NULL_TREE;
10625
10626 p1 = c_getstr (s1);
10627 if (p1 != NULL)
10628 {
10629 const char *r = strstr (p1, p2);
10630 tree tem;
10631
10632 if (r == NULL)
10633 return build_int_cst (TREE_TYPE (s1), 0);
10634
10635 /* Return an offset into the constant string argument. */
10636 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10637 return fold_convert_loc (loc, type, tem);
10638 }
10639
10640 /* The argument is const char *, and the result is char *, so we need
10641 a type conversion here to avoid a warning. */
10642 if (p2[0] == '\0')
10643 return fold_convert_loc (loc, type, s1);
10644
10645 if (p2[1] != '\0')
10646 return NULL_TREE;
10647
10648 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10649 if (!fn)
10650 return NULL_TREE;
10651
10652 /* New argument list transforming strstr(s1, s2) to
10653 strchr(s1, s2[0]). */
10654 return build_call_expr_loc (loc, fn, 2, s1,
10655 build_int_cst (integer_type_node, p2[0]));
10656 }
10657 }
10658
10659 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10660 the call, and TYPE is its return type.
10661
10662 Return NULL_TREE if no simplification was possible, otherwise return the
10663 simplified form of the call as a tree.
10664
10665 The simplified form may be a constant or other expression which
10666 computes the same value, but in a more efficient manner (including
10667 calls to other builtin functions).
10668
10669 The call may contain arguments which need to be evaluated, but
10670 which are not useful to determine the result of the call. In
10671 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10672 COMPOUND_EXPR will be an argument which must be evaluated.
10673 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10674 COMPOUND_EXPR in the chain will contain the tree for the simplified
10675 form of the builtin function call. */
10676
10677 static tree
10678 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10679 {
10680 if (!validate_arg (s1, POINTER_TYPE)
10681 || !validate_arg (s2, INTEGER_TYPE))
10682 return NULL_TREE;
10683 else
10684 {
10685 const char *p1;
10686
10687 if (TREE_CODE (s2) != INTEGER_CST)
10688 return NULL_TREE;
10689
10690 p1 = c_getstr (s1);
10691 if (p1 != NULL)
10692 {
10693 char c;
10694 const char *r;
10695 tree tem;
10696
10697 if (target_char_cast (s2, &c))
10698 return NULL_TREE;
10699
10700 r = strchr (p1, c);
10701
10702 if (r == NULL)
10703 return build_int_cst (TREE_TYPE (s1), 0);
10704
10705 /* Return an offset into the constant string argument. */
10706 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10707 return fold_convert_loc (loc, type, tem);
10708 }
10709 return NULL_TREE;
10710 }
10711 }
10712
10713 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10714 the call, and TYPE is its return type.
10715
10716 Return NULL_TREE if no simplification was possible, otherwise return the
10717 simplified form of the call as a tree.
10718
10719 The simplified form may be a constant or other expression which
10720 computes the same value, but in a more efficient manner (including
10721 calls to other builtin functions).
10722
10723 The call may contain arguments which need to be evaluated, but
10724 which are not useful to determine the result of the call. In
10725 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10726 COMPOUND_EXPR will be an argument which must be evaluated.
10727 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10728 COMPOUND_EXPR in the chain will contain the tree for the simplified
10729 form of the builtin function call. */
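/* For example, the one non-constant transformation performed below is

     strrchr (s, '\0')  ->  strchr (s, '\0')

   which is valid because '\0' occurs exactly once in a string, so the
   first and last occurrences coincide and the cheaper call suffices.  */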
10730
10731 static tree
10732 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10733 {
10734 if (!validate_arg (s1, POINTER_TYPE)
10735 || !validate_arg (s2, INTEGER_TYPE))
10736 return NULL_TREE;
10737 else
10738 {
10739 tree fn;
10740 const char *p1;
10741
10742 if (TREE_CODE (s2) != INTEGER_CST)
10743 return NULL_TREE;
10744
10745 p1 = c_getstr (s1);
10746 if (p1 != NULL)
10747 {
10748 char c;
10749 const char *r;
10750 tree tem;
10751
10752 if (target_char_cast (s2, &c))
10753 return NULL_TREE;
10754
10755 r = strrchr (p1, c);
10756
10757 if (r == NULL)
10758 return build_int_cst (TREE_TYPE (s1), 0);
10759
10760 /* Return an offset into the constant string argument. */
10761 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10762 return fold_convert_loc (loc, type, tem);
10763 }
10764
10765 if (! integer_zerop (s2))
10766 return NULL_TREE;
10767
10768 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10769 if (!fn)
10770 return NULL_TREE;
10771
10772 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10773 return build_call_expr_loc (loc, fn, 2, s1, s2);
10774 }
10775 }
10776
10777 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10778 to the call, and TYPE is its return type.
10779
10780 Return NULL_TREE if no simplification was possible, otherwise return the
10781 simplified form of the call as a tree.
10782
10783 The simplified form may be a constant or other expression which
10784 computes the same value, but in a more efficient manner (including
10785 calls to other builtin functions).
10786
10787 The call may contain arguments which need to be evaluated, but
10788 which are not useful to determine the result of the call. In
10789 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10790 COMPOUND_EXPR will be an argument which must be evaluated.
10791 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10792 COMPOUND_EXPR in the chain will contain the tree for the simplified
10793 form of the builtin function call. */
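/* The transformations performed below, for illustration:

     strpbrk (s, "")        ->  NULL            (s still evaluated)
     strpbrk (s, "c")       ->  strchr (s, 'c')
     strpbrk ("abc", "bx")  ->  "abc" + 1       (both strings constant)  */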
10794
10795 static tree
10796 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10797 {
10798 if (!validate_arg (s1, POINTER_TYPE)
10799 || !validate_arg (s2, POINTER_TYPE))
10800 return NULL_TREE;
10801 else
10802 {
10803 tree fn;
10804 const char *p1, *p2;
10805
10806 p2 = c_getstr (s2);
10807 if (p2 == NULL)
10808 return NULL_TREE;
10809
10810 p1 = c_getstr (s1);
10811 if (p1 != NULL)
10812 {
10813 const char *r = strpbrk (p1, p2);
10814 tree tem;
10815
10816 if (r == NULL)
10817 return build_int_cst (TREE_TYPE (s1), 0);
10818
10819 /* Return an offset into the constant string argument. */
10820 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10821 return fold_convert_loc (loc, type, tem);
10822 }
10823
10824 if (p2[0] == '\0')
10825 /* strpbrk(x, "") == NULL.
10826 Evaluate and ignore s1 in case it had side-effects. */
10827 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10828
10829 if (p2[1] != '\0')
10830 return NULL_TREE; /* Really call strpbrk. */
10831
10832 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10833 if (!fn)
10834 return NULL_TREE;
10835
10836 /* New argument list transforming strpbrk(s1, s2) to
10837 strchr(s1, s2[0]). */
10838 return build_call_expr_loc (loc, fn, 2, s1,
10839 build_int_cst (integer_type_node, p2[0]));
10840 }
10841 }
10842
10843 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10844 to the call.
10845
10846 Return NULL_TREE if no simplification was possible, otherwise return the
10847 simplified form of the call as a tree.
10848
10849 The simplified form may be a constant or other expression which
10850 computes the same value, but in a more efficient manner (including
10851 calls to other builtin functions).
10852
10853 The call may contain arguments which need to be evaluated, but
10854 which are not useful to determine the result of the call. In
10855 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10856 COMPOUND_EXPR will be an argument which must be evaluated.
10857 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10858 COMPOUND_EXPR in the chain will contain the tree for the simplified
10859 form of the builtin function call. */
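/* For example (illustrative only):

     strspn ("abcba", "ab")  ->  2   (folded at compile time)
     strspn (s, "")          ->  0   (s still evaluated)
     strspn ("", accept)     ->  0   (accept still evaluated)  */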
10860
10861 static tree
10862 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10863 {
10864 if (!validate_arg (s1, POINTER_TYPE)
10865 || !validate_arg (s2, POINTER_TYPE))
10866 return NULL_TREE;
10867 else
10868 {
10869 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10870
10871 /* If both arguments are constants, evaluate at compile-time. */
10872 if (p1 && p2)
10873 {
10874 const size_t r = strspn (p1, p2);
10875 return build_int_cst (size_type_node, r);
10876 }
10877
10878 /* If either argument is "", the result is 0. */
10879 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10880 /* Evaluate and ignore both arguments in case either one has
10881 side-effects. */
10882 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10883 s1, s2);
10884 return NULL_TREE;
10885 }
10886 }
10887
10888 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10889 to the call.
10890
10891 Return NULL_TREE if no simplification was possible, otherwise return the
10892 simplified form of the call as a tree.
10893
10894 The simplified form may be a constant or other expression which
10895 computes the same value, but in a more efficient manner (including
10896 calls to other builtin functions).
10897
10898 The call may contain arguments which need to be evaluated, but
10899 which are not useful to determine the result of the call. In
10900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10901 COMPOUND_EXPR will be an argument which must be evaluated.
10902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10903 COMPOUND_EXPR in the chain will contain the tree for the simplified
10904 form of the builtin function call. */
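/* For example (illustrative only):

     strcspn ("abcba", "c")  ->  2   (folded at compile time)
     strcspn ("", reject)    ->  0   (reject still evaluated)
     strcspn (s, "")         ->  strlen (s)  */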
10905
10906 static tree
10907 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10908 {
10909 if (!validate_arg (s1, POINTER_TYPE)
10910 || !validate_arg (s2, POINTER_TYPE))
10911 return NULL_TREE;
10912 else
10913 {
10914 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10915
10916 /* If both arguments are constants, evaluate at compile-time. */
10917 if (p1 && p2)
10918 {
10919 const size_t r = strcspn (p1, p2);
10920 return build_int_cst (size_type_node, r);
10921 }
10922
10923 /* If the first argument is "", the result is 0. */
10924 if (p1 && *p1 == '\0')
10925 {
10926 /* Evaluate and ignore argument s2 in case it has
10927 side-effects. */
10928 return omit_one_operand_loc (loc, size_type_node,
10929 size_zero_node, s2);
10930 }
10931
10932 /* If the second argument is "", return __builtin_strlen(s1). */
10933 if (p2 && *p2 == '\0')
10934 {
10935 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10936
10937 /* If the replacement _DECL isn't initialized, don't do the
10938 transformation. */
10939 if (!fn)
10940 return NULL_TREE;
10941
10942 return build_call_expr_loc (loc, fn, 1, s1);
10943 }
10944 return NULL_TREE;
10945 }
10946 }
10947
10948 /* Fold the next_arg or va_start call EXP. Returns true if an error
10949 was produced, false otherwise. This is done so that we don't output
10950 the error or warning twice or three times. */
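/* For example, this hypothetical misuse draws the "not last named
   argument" warning issued below:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // should name b, the last named parameter
       va_end (ap);
     }
*/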
10951
10952 bool
10953 fold_builtin_next_arg (tree exp, bool va_start_p)
10954 {
10955 tree fntype = TREE_TYPE (current_function_decl);
10956 int nargs = call_expr_nargs (exp);
10957 tree arg;
10958 /* There is a good chance the current input_location points inside
10959 the definition of the va_start macro (perhaps on the token for the
10960 builtin) in a system header, so warnings would not be emitted there.
10961 Use the location in real source code instead. */
10962 source_location current_location =
10963 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10964 NULL);
10965
10966 if (!stdarg_p (fntype))
10967 {
10968 error ("%<va_start%> used in function with fixed args");
10969 return true;
10970 }
10971
10972 if (va_start_p)
10973 {
10974 if (nargs != 2)
10975 {
10976 error ("wrong number of arguments to function %<va_start%>");
10977 return true;
10978 }
10979 arg = CALL_EXPR_ARG (exp, 1);
10980 }
10981 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
10982 once we have checked the arguments and, if needed, issued a warning. */
10983 else
10984 {
10985 if (nargs == 0)
10986 {
10987 /* Evidently an out of date version of <stdarg.h>; can't validate
10988 va_start's second argument, but can still work as intended. */
10989 warning_at (current_location,
10990 OPT_Wvarargs,
10991 "%<__builtin_next_arg%> called without an argument");
10992 return true;
10993 }
10994 else if (nargs > 1)
10995 {
10996 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10997 return true;
10998 }
10999 arg = CALL_EXPR_ARG (exp, 0);
11000 }
11001
11002 if (TREE_CODE (arg) == SSA_NAME)
11003 arg = SSA_NAME_VAR (arg);
11004
11005 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11006 or __builtin_next_arg (0) the first time we see it, after checking
11007 the arguments and if needed issuing a warning. */
11008 if (!integer_zerop (arg))
11009 {
11010 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11011
11012 /* Strip off all nops for the sake of the comparison. This
11013 is not quite the same as STRIP_NOPS. It does more.
11014 We must also strip off INDIRECT_EXPR for C++ reference
11015 parameters. */
11016 while (CONVERT_EXPR_P (arg)
11017 || TREE_CODE (arg) == INDIRECT_REF)
11018 arg = TREE_OPERAND (arg, 0);
11019 if (arg != last_parm)
11020 {
11021 /* FIXME: Sometimes the tree optimizers hand us something other
11022 than the last argument even though the user used the last
11023 argument. We just warn and set the arg to be the last
11024 argument so that we will not get wrong code because of
11025 it. */
11026 warning_at (current_location,
11027 OPT_Wvarargs,
11028 "second parameter of %<va_start%> not last named argument");
11029 }
11030
11031 /* Undefined by C99 7.15.1.4p4 (va_start):
11032 "If the parameter parmN is declared with the register storage
11033 class, with a function or array type, or with a type that is
11034 not compatible with the type that results after application of
11035 the default argument promotions, the behavior is undefined."
11036 */
11037 else if (DECL_REGISTER (arg))
11038 {
11039 warning_at (current_location,
11040 OPT_Wvarargs,
11041 "undefined behaviour when second parameter of "
11042 "%<va_start%> is declared with %<register%> storage");
11043 }
11044
11045 /* We want to verify the second parameter just once before the tree
11046 optimizers are run and then avoid keeping it in the tree,
11047 as otherwise we could warn even for correct code like:
11048 void foo (int i, ...)
11049 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11050 if (va_start_p)
11051 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11052 else
11053 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11054 }
11055 return false;
11056 }
11057
11058
11059 /* Expand a call EXP to __builtin_object_size. */
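/* A brief note on the fallback semantics implemented here: by the time
   expansion is reached the size is unknown, so the call degenerates to

     __builtin_object_size (p, 0)  ->  (size_t) -1   (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (types 2 and 3)

   i.e. the maximum for the upper-bound types and the minimum for the
   lower-bound types.  */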
11060
11061 static rtx
11062 expand_builtin_object_size (tree exp)
11063 {
11064 tree ost;
11065 int object_size_type;
11066 tree fndecl = get_callee_fndecl (exp);
11067
11068 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11069 {
11070 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11071 exp, fndecl);
11072 expand_builtin_trap ();
11073 return const0_rtx;
11074 }
11075
11076 ost = CALL_EXPR_ARG (exp, 1);
11077 STRIP_NOPS (ost);
11078
11079 if (TREE_CODE (ost) != INTEGER_CST
11080 || tree_int_cst_sgn (ost) < 0
11081 || compare_tree_int (ost, 3) > 0)
11082 {
11083 error ("%Klast argument of %D is not integer constant between 0 and 3",
11084 exp, fndecl);
11085 expand_builtin_trap ();
11086 return const0_rtx;
11087 }
11088
11089 object_size_type = tree_to_shwi (ost);
11090
11091 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11092 }
11093
11094 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11095 FCODE is the BUILT_IN_* to use.
11096 Return NULL_RTX if we failed; the caller should emit a normal call,
11097 otherwise try to get the result in TARGET, if convenient (and in
11098 mode MODE if that's convenient). */
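/* For instance (illustrative), given

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));

   the constant length 4 cannot exceed the known object size 8, so the
   call is expanded as a plain memcpy (buf, src, 4). A constant length
   greater than the size instead draws the "will always overflow"
   warning below and leaves the checked call in place.  */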
11099
11100 static rtx
11101 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11102 enum built_in_function fcode)
11103 {
11104 tree dest, src, len, size;
11105
11106 if (!validate_arglist (exp,
11107 POINTER_TYPE,
11108 fcode == BUILT_IN_MEMSET_CHK
11109 ? INTEGER_TYPE : POINTER_TYPE,
11110 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11111 return NULL_RTX;
11112
11113 dest = CALL_EXPR_ARG (exp, 0);
11114 src = CALL_EXPR_ARG (exp, 1);
11115 len = CALL_EXPR_ARG (exp, 2);
11116 size = CALL_EXPR_ARG (exp, 3);
11117
11118 if (! tree_fits_uhwi_p (size))
11119 return NULL_RTX;
11120
11121 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11122 {
11123 tree fn;
11124
11125 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11126 {
11127 warning_at (tree_nonartificial_location (exp),
11128 0, "%Kcall to %D will always overflow destination buffer",
11129 exp, get_callee_fndecl (exp));
11130 return NULL_RTX;
11131 }
11132
11133 fn = NULL_TREE;
11134 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11135 mem{cpy,pcpy,move,set} is available. */
11136 switch (fcode)
11137 {
11138 case BUILT_IN_MEMCPY_CHK:
11139 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11140 break;
11141 case BUILT_IN_MEMPCPY_CHK:
11142 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11143 break;
11144 case BUILT_IN_MEMMOVE_CHK:
11145 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11146 break;
11147 case BUILT_IN_MEMSET_CHK:
11148 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11149 break;
11150 default:
11151 break;
11152 }
11153
11154 if (! fn)
11155 return NULL_RTX;
11156
11157 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11158 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11159 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11160 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11161 }
11162 else if (fcode == BUILT_IN_MEMSET_CHK)
11163 return NULL_RTX;
11164 else
11165 {
11166 unsigned int dest_align = get_pointer_alignment (dest);
11167
11168 /* If DEST is not a pointer type, call the normal function. */
11169 if (dest_align == 0)
11170 return NULL_RTX;
11171
11172 /* If SRC and DEST are the same (and not volatile), do nothing. */
11173 if (operand_equal_p (src, dest, 0))
11174 {
11175 tree expr;
11176
11177 if (fcode != BUILT_IN_MEMPCPY_CHK)
11178 {
11179 /* Evaluate and ignore LEN in case it has side-effects. */
11180 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11181 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11182 }
11183
11184 expr = fold_build_pointer_plus (dest, len);
11185 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11186 }
11187
11188 /* __memmove_chk special case. */
11189 if (fcode == BUILT_IN_MEMMOVE_CHK)
11190 {
11191 unsigned int src_align = get_pointer_alignment (src);
11192
11193 if (src_align == 0)
11194 return NULL_RTX;
11195
11196 /* If src is categorized for a readonly section we can use
11197 normal __memcpy_chk. */
11198 if (readonly_data_expr (src))
11199 {
11200 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11201 if (!fn)
11202 return NULL_RTX;
11203 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11204 dest, src, len, size);
11205 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11206 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11207 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11208 }
11209 }
11210 return NULL_RTX;
11211 }
11212 }
11213
11214 /* Emit warning if a buffer overflow is detected at compile time. */
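/* Example of code (hypothetical) that triggers this warning:

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   Here strlen ("hello") == 5 is not less than the object size 4, so
   the "will always overflow destination buffer" warning is emitted.  */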
11215
11216 static void
11217 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11218 {
11219 int is_strlen = 0;
11220 tree len, size;
11221 location_t loc = tree_nonartificial_location (exp);
11222
11223 switch (fcode)
11224 {
11225 case BUILT_IN_STRCPY_CHK:
11226 case BUILT_IN_STPCPY_CHK:
11227 /* For __strcat_chk the warning will be emitted only if overflowing
11228 by at least strlen (dest) + 1 bytes. */
11229 case BUILT_IN_STRCAT_CHK:
11230 len = CALL_EXPR_ARG (exp, 1);
11231 size = CALL_EXPR_ARG (exp, 2);
11232 is_strlen = 1;
11233 break;
11234 case BUILT_IN_STRNCAT_CHK:
11235 case BUILT_IN_STRNCPY_CHK:
11236 case BUILT_IN_STPNCPY_CHK:
11237 len = CALL_EXPR_ARG (exp, 2);
11238 size = CALL_EXPR_ARG (exp, 3);
11239 break;
11240 case BUILT_IN_SNPRINTF_CHK:
11241 case BUILT_IN_VSNPRINTF_CHK:
11242 len = CALL_EXPR_ARG (exp, 1);
11243 size = CALL_EXPR_ARG (exp, 3);
11244 break;
11245 default:
11246 gcc_unreachable ();
11247 }
11248
11249 if (!len || !size)
11250 return;
11251
11252 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11253 return;
11254
11255 if (is_strlen)
11256 {
11257 len = c_strlen (len, 1);
11258 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11259 return;
11260 }
11261 else if (fcode == BUILT_IN_STRNCAT_CHK)
11262 {
11263 tree src = CALL_EXPR_ARG (exp, 1);
11264 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11265 return;
11266 src = c_strlen (src, 1);
11267 if (! src || ! tree_fits_uhwi_p (src))
11268 {
11269 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11270 exp, get_callee_fndecl (exp));
11271 return;
11272 }
11273 else if (tree_int_cst_lt (src, size))
11274 return;
11275 }
11276 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11277 return;
11278
11279 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11280 exp, get_callee_fndecl (exp));
11281 }
11282
11283 /* Emit warning if a buffer overflow is detected at compile time
11284 in __sprintf_chk/__vsprintf_chk calls. */
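/* For example (hypothetical):

     char buf[4];
     __builtin___sprintf_chk (buf, 1, __builtin_object_size (buf, 0),
                              "%s", "hello");

   The "%s" argument is a literal of length 5 >= 4, so the overflow
   warning below fires at compile time.  */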
11285
11286 static void
11287 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11288 {
11289 tree size, len, fmt;
11290 const char *fmt_str;
11291 int nargs = call_expr_nargs (exp);
11292
11293 /* Verify the required arguments in the original call. */
11294
11295 if (nargs < 4)
11296 return;
11297 size = CALL_EXPR_ARG (exp, 2);
11298 fmt = CALL_EXPR_ARG (exp, 3);
11299
11300 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11301 return;
11302
11303 /* Check whether the format is a literal string constant. */
11304 fmt_str = c_getstr (fmt);
11305 if (fmt_str == NULL)
11306 return;
11307
11308 if (!init_target_chars ())
11309 return;
11310
11311 /* If the format doesn't contain % args or %%, we know its size. */
11312 if (strchr (fmt_str, target_percent) == 0)
11313 len = build_int_cstu (size_type_node, strlen (fmt_str));
11314 /* If the format is "%s" and the first variadic argument is a string
11315 literal, we know the size too. */
11316 else if (fcode == BUILT_IN_SPRINTF_CHK
11317 && strcmp (fmt_str, target_percent_s) == 0)
11318 {
11319 tree arg;
11320
11321 if (nargs < 5)
11322 return;
11323 arg = CALL_EXPR_ARG (exp, 4);
11324 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11325 return;
11326
11327 len = c_strlen (arg, 1);
11328 if (!len || ! tree_fits_uhwi_p (len))
11329 return;
11330 }
11331 else
11332 return;
11333
11334 if (! tree_int_cst_lt (len, size))
11335 warning_at (tree_nonartificial_location (exp),
11336 0, "%Kcall to %D will always overflow destination buffer",
11337 exp, get_callee_fndecl (exp));
11338 }
11339
11340 /* Emit a warning if free is called with the address of a variable. */
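/* For example (hypothetical):

     int x;
     free (&x);   // warning: attempt to free a non-heap object 'x'
*/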
11341
11342 static void
11343 maybe_emit_free_warning (tree exp)
11344 {
11345 tree arg = CALL_EXPR_ARG (exp, 0);
11346
11347 STRIP_NOPS (arg);
11348 if (TREE_CODE (arg) != ADDR_EXPR)
11349 return;
11350
11351 arg = get_base_address (TREE_OPERAND (arg, 0));
11352 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11353 return;
11354
11355 if (SSA_VAR_P (arg))
11356 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11357 "%Kattempt to free a non-heap object %qD", exp, arg);
11358 else
11359 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11360 "%Kattempt to free a non-heap object", exp);
11361 }
11362
11363 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11364 if possible. */
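/* For instance (illustrative), given

     char buf[64];

   __builtin_object_size (buf, 0) folds to 64 here, while for a pointer
   whose target is not yet known the SSA_NAME case below defers folding
   in the hope that later passes can determine it.  */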
11365
11366 static tree
11367 fold_builtin_object_size (tree ptr, tree ost)
11368 {
11369 unsigned HOST_WIDE_INT bytes;
11370 int object_size_type;
11371
11372 if (!validate_arg (ptr, POINTER_TYPE)
11373 || !validate_arg (ost, INTEGER_TYPE))
11374 return NULL_TREE;
11375
11376 STRIP_NOPS (ost);
11377
11378 if (TREE_CODE (ost) != INTEGER_CST
11379 || tree_int_cst_sgn (ost) < 0
11380 || compare_tree_int (ost, 3) > 0)
11381 return NULL_TREE;
11382
11383 object_size_type = tree_to_shwi (ost);
11384
11385 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11386 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11387 and (size_t) 0 for types 2 and 3. */
11388 if (TREE_SIDE_EFFECTS (ptr))
11389 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11390
11391 if (TREE_CODE (ptr) == ADDR_EXPR)
11392 {
11393 bytes = compute_builtin_object_size (ptr, object_size_type);
11394 if (wi::fits_to_tree_p (bytes, size_type_node))
11395 return build_int_cstu (size_type_node, bytes);
11396 }
11397 else if (TREE_CODE (ptr) == SSA_NAME)
11398 {
11399 /* If the object size is not known yet, delay folding until
11400 later. Maybe subsequent passes will help determine
11401 it. */
11402 bytes = compute_builtin_object_size (ptr, object_size_type);
11403 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11404 && wi::fits_to_tree_p (bytes, size_type_node))
11405 return build_int_cstu (size_type_node, bytes);
11406 }
11407
11408 return NULL_TREE;
11409 }
11410
11411 /* Builtins with folding operations that operate on "..." arguments
11412 need special handling; we need to store the arguments in a convenient
11413 data structure before attempting any folding. Fortunately there are
11414 only a few builtins that fall into this category. FNDECL is the
11415 function, ARGS is the array of its NARGS arguments. */
11416
11417 static tree
11418 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11419 {
11420 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11421 tree ret = NULL_TREE;
11422
11423 switch (fcode)
11424 {
11425 case BUILT_IN_FPCLASSIFY:
11426 ret = fold_builtin_fpclassify (loc, args, nargs);
11427 break;
11428
11429 default:
11430 break;
11431 }
11432 if (ret)
11433 {
11434 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11435 SET_EXPR_LOCATION (ret, loc);
11436 TREE_NO_WARNING (ret) = 1;
11437 return ret;
11438 }
11439 return NULL_TREE;
11440 }
11441
11442 /* Initialize format string characters in the target charset. */
11443
11444 bool
11445 init_target_chars (void)
11446 {
11447 static bool init;
11448 if (!init)
11449 {
11450 target_newline = lang_hooks.to_target_charset ('\n');
11451 target_percent = lang_hooks.to_target_charset ('%');
11452 target_c = lang_hooks.to_target_charset ('c');
11453 target_s = lang_hooks.to_target_charset ('s');
11454 if (target_newline == 0 || target_percent == 0 || target_c == 0
11455 || target_s == 0)
11456 return false;
11457
11458 target_percent_c[0] = target_percent;
11459 target_percent_c[1] = target_c;
11460 target_percent_c[2] = '\0';
11461
11462 target_percent_s[0] = target_percent;
11463 target_percent_s[1] = target_s;
11464 target_percent_s[2] = '\0';
11465
11466 target_percent_s_newline[0] = target_percent;
11467 target_percent_s_newline[1] = target_s;
11468 target_percent_s_newline[2] = target_newline;
11469 target_percent_s_newline[3] = '\0';
11470
11471 init = true;
11472 }
11473 return true;
11474 }
11475
11476 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11477 and no overflow/underflow occurred. INEXACT is true if M was not
11478 exactly calculated. TYPE is the tree type for the result. This
11479 function assumes that you cleared the MPFR flags before calculating
11480 M, so that any flags set in between can be detected on entry to
11481 this function. Return NULL_TREE if any checks fail. */
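/* The expected caller protocol, as a sketch mirroring the uses further
   down in this file:

     mpfr_clear_flags ();
     inexact = func (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);  */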
11482
11483 static tree
11484 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11485 {
11486 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11487 overflow/underflow occurred. If -frounding-math, proceed iff the
11488 result of calling FUNC was exact. */
11489 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11490 && (!flag_rounding_math || !inexact))
11491 {
11492 REAL_VALUE_TYPE rr;
11493
11494 real_from_mpfr (&rr, m, type, GMP_RNDN);
11495 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11496 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11497 but the mpfr_t is not, then we underflowed in the
11498 conversion. */
11499 if (real_isfinite (&rr)
11500 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11501 {
11502 REAL_VALUE_TYPE rmode;
11503
11504 real_convert (&rmode, TYPE_MODE (type), &rr);
11505 /* Proceed iff the specified mode can hold the value. */
11506 if (real_identical (&rmode, &rr))
11507 return build_real (type, rmode);
11508 }
11509 }
11510 return NULL_TREE;
11511 }
11512
11513 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11514 number and no overflow/underflow occurred. INEXACT is true if M
11515 was not exactly calculated. TYPE is the tree type for the result.
11516 This function assumes that you cleared the MPFR flags before
11517 calculating M, so that any flags set in between can be detected
11518 on entry to this function. Return NULL_TREE if any checks fail;
11519 if FORCE_CONVERT is true, the checks are bypassed. */
11520
11521 static tree
11522 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11523 {
11524 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11525 overflow/underflow occurred. If -frounding-math, proceed iff the
11526 result of calling FUNC was exact. */
11527 if (force_convert
11528 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11529 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11530 && (!flag_rounding_math || !inexact)))
11531 {
11532 REAL_VALUE_TYPE re, im;
11533
11534 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11535 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11536 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11537 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11538 but the mpfr_t is not, then we underflowed in the
11539 conversion. */
11540 if (force_convert
11541 || (real_isfinite (&re) && real_isfinite (&im)
11542 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11543 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11544 {
11545 REAL_VALUE_TYPE re_mode, im_mode;
11546
11547 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11548 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11549 /* Proceed iff the specified mode can hold the value. */
11550 if (force_convert
11551 || (real_identical (&re_mode, &re)
11552 && real_identical (&im_mode, &im)))
11553 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11554 build_real (TREE_TYPE (type), im_mode));
11555 }
11556 }
11557 return NULL_TREE;
11558 }
11559
11560 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11561 FUNC on it and return the resulting value as a tree with type TYPE.
11562 If MIN and/or MAX are not NULL, then the supplied ARG must be
11563 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11564 acceptable values, otherwise they are not. The mpfr precision is
11565 set to the precision of TYPE. We assume that function FUNC returns
11566 zero if the result could be calculated exactly within the requested
11567 precision. */
11568
11569 static tree
11570 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11571 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11572 bool inclusive)
11573 {
11574 tree result = NULL_TREE;
11575
11576 STRIP_NOPS (arg);
11577
11578 /* To proceed, MPFR must exactly represent the target floating point
11579 format, which only happens when the target base equals two. */
11580 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11581 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11582 {
11583 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11584
11585 if (real_isfinite (ra)
11586 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11587 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11588 {
11589 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11590 const int prec = fmt->p;
11591 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11592 int inexact;
11593 mpfr_t m;
11594
11595 mpfr_init2 (m, prec);
11596 mpfr_from_real (m, ra, GMP_RNDN);
11597 mpfr_clear_flags ();
11598 inexact = func (m, m, rnd);
11599 result = do_mpfr_ckconv (m, type, inexact);
11600 mpfr_clear (m);
11601 }
11602 }
11603
11604 return result;
11605 }
11606
11607 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11608 FUNC on it and return the resulting value as a tree with type TYPE.
11609 The mpfr precision is set to the precision of TYPE. We assume that
11610 function FUNC returns zero if the result could be calculated
11611 exactly within the requested precision. */
11612
11613 static tree
11614 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11615 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11616 {
11617 tree result = NULL_TREE;
11618
11619 STRIP_NOPS (arg1);
11620 STRIP_NOPS (arg2);
11621
11622 /* To proceed, MPFR must exactly represent the target floating point
11623 format, which only happens when the target base equals two. */
11624 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11625 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11626 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11627 {
11628 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11629 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11630
11631 if (real_isfinite (ra1) && real_isfinite (ra2))
11632 {
11633 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11634 const int prec = fmt->p;
11635 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11636 int inexact;
11637 mpfr_t m1, m2;
11638
11639 mpfr_inits2 (prec, m1, m2, NULL);
11640 mpfr_from_real (m1, ra1, GMP_RNDN);
11641 mpfr_from_real (m2, ra2, GMP_RNDN);
11642 mpfr_clear_flags ();
11643 inexact = func (m1, m1, m2, rnd);
11644 result = do_mpfr_ckconv (m1, type, inexact);
11645 mpfr_clears (m1, m2, NULL);
11646 }
11647 }
11648
11649 return result;
11650 }
11651
11652 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11653 FUNC on it and return the resulting value as a tree with type TYPE.
11654 The mpfr precision is set to the precision of TYPE. We assume that
11655 function FUNC returns zero if the result could be calculated
11656 exactly within the requested precision. */
11657
11658 static tree
11659 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11660 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11661 {
11662 tree result = NULL_TREE;
11663
11664 STRIP_NOPS (arg1);
11665 STRIP_NOPS (arg2);
11666 STRIP_NOPS (arg3);
11667
11668 /* To proceed, MPFR must exactly represent the target floating point
11669 format, which only happens when the target base equals two. */
11670 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11671 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11672 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11673 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11674 {
11675 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11676 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11677 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11678
11679 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11680 {
11681 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11682 const int prec = fmt->p;
11683 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11684 int inexact;
11685 mpfr_t m1, m2, m3;
11686
11687 mpfr_inits2 (prec, m1, m2, m3, NULL);
11688 mpfr_from_real (m1, ra1, GMP_RNDN);
11689 mpfr_from_real (m2, ra2, GMP_RNDN);
11690 mpfr_from_real (m3, ra3, GMP_RNDN);
11691 mpfr_clear_flags ();
11692 inexact = func (m1, m1, m2, m3, rnd);
11693 result = do_mpfr_ckconv (m1, type, inexact);
11694 mpfr_clears (m1, m2, m3, NULL);
11695 }
11696 }
11697
11698 return result;
11699 }
11700
11701 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11702 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11703 If ARG_SINP and ARG_COSP are NULL then the result is returned
11704 as a complex value.
11705 The type is taken from the type of ARG and is used for setting the
11706 precision of the calculation and results. */
11707
11708 static tree
11709 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11710 {
11711 tree const type = TREE_TYPE (arg);
11712 tree result = NULL_TREE;
11713
11714 STRIP_NOPS (arg);
11715
11716 /* To proceed, MPFR must exactly represent the target floating point
11717 format, which only happens when the target base equals two. */
11718 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11719 && TREE_CODE (arg) == REAL_CST
11720 && !TREE_OVERFLOW (arg))
11721 {
11722 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11723
11724 if (real_isfinite (ra))
11725 {
11726 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11727 const int prec = fmt->p;
11728 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11729 tree result_s, result_c;
11730 int inexact;
11731 mpfr_t m, ms, mc;
11732
11733 mpfr_inits2 (prec, m, ms, mc, NULL);
11734 mpfr_from_real (m, ra, GMP_RNDN);
11735 mpfr_clear_flags ();
11736 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11737 result_s = do_mpfr_ckconv (ms, type, inexact);
11738 result_c = do_mpfr_ckconv (mc, type, inexact);
11739 mpfr_clears (m, ms, mc, NULL);
11740 if (result_s && result_c)
11741 {
11742 /* If we are to return the result as a complex value, do so. */
11743 if (!arg_sinp && !arg_cosp)
11744 return build_complex (build_complex_type (type),
11745 result_c, result_s);
11746
11747 /* Dereference the sin/cos pointer arguments. */
11748 arg_sinp = build_fold_indirect_ref (arg_sinp);
11749 arg_cosp = build_fold_indirect_ref (arg_cosp);
11750 /* Proceed only if valid pointer types were passed in. */
11751 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11752 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11753 {
11754 /* Set the values. */
11755 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11756 result_s);
11757 TREE_SIDE_EFFECTS (result_s) = 1;
11758 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11759 result_c);
11760 TREE_SIDE_EFFECTS (result_c) = 1;
11761 /* Combine the assignments into a compound expr. */
11762 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11763 result_s, result_c));
11764 }
11765 }
11766 }
11767 }
11768 return result;
11769 }
11770
11771 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11772 two-argument mpfr order N Bessel function FUNC on them and return
11773 the resulting value as a tree with type TYPE. The mpfr precision
11774 is set to the precision of TYPE. We assume that function FUNC
11775 returns zero if the result could be calculated exactly within the
11776 requested precision. */
11777 static tree
11778 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11779 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11780 const REAL_VALUE_TYPE *min, bool inclusive)
11781 {
11782 tree result = NULL_TREE;
11783
11784 STRIP_NOPS (arg1);
11785 STRIP_NOPS (arg2);
11786
11787 /* To proceed, MPFR must exactly represent the target floating point
11788 format, which only happens when the target base equals two. */
11789 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11790 && tree_fits_shwi_p (arg1)
11791 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11792 {
11793 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11794 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11795
11796 if (n == (long)n
11797 && real_isfinite (ra)
11798 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11799 {
11800 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11801 const int prec = fmt->p;
11802 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11803 int inexact;
11804 mpfr_t m;
11805
11806 mpfr_init2 (m, prec);
11807 mpfr_from_real (m, ra, GMP_RNDN);
11808 mpfr_clear_flags ();
11809 inexact = func (m, n, m, rnd);
11810 result = do_mpfr_ckconv (m, type, inexact);
11811 mpfr_clear (m);
11812 }
11813 }
11814
11815 return result;
11816 }
11817
11818 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11819 the pointer *(ARG_QUO) and return the result. The type is taken
11820 from the type of ARG0 and is used for setting the precision of the
11821 calculation and results. */
11822
11823 static tree
11824 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11825 {
11826 tree const type = TREE_TYPE (arg0);
11827 tree result = NULL_TREE;
11828
11829 STRIP_NOPS (arg0);
11830 STRIP_NOPS (arg1);
11831
11832 /* To proceed, MPFR must exactly represent the target floating point
11833 format, which only happens when the target base equals two. */
11834 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11835 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11836 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11837 {
11838 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11839 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11840
11841 if (real_isfinite (ra0) && real_isfinite (ra1))
11842 {
11843 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11844 const int prec = fmt->p;
11845 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11846 tree result_rem;
11847 long integer_quo;
11848 mpfr_t m0, m1;
11849
11850 mpfr_inits2 (prec, m0, m1, NULL);
11851 mpfr_from_real (m0, ra0, GMP_RNDN);
11852 mpfr_from_real (m1, ra1, GMP_RNDN);
11853 mpfr_clear_flags ();
11854 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11855 /* Remquo is independent of the rounding mode, so pass
11856 inexact=0 to do_mpfr_ckconv(). */
11857 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11858 mpfr_clears (m0, m1, NULL);
11859 if (result_rem)
11860 {
11861 /* MPFR calculates quo in the host's long so it may
11862 return more bits in quo than the target int can hold
11863 if sizeof(host long) > sizeof(target int). This can
11864 happen even for native compilers in LP64 mode. In
11865 these cases, reduce the quo value modulo the largest
11866 number that the target int can hold, leaving one
11867 bit for the sign. */
11868 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11869 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11870
11871 /* Dereference the quo pointer argument. */
11872 arg_quo = build_fold_indirect_ref (arg_quo);
11873 /* Proceed iff a valid pointer type was passed in. */
11874 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11875 {
11876 /* Set the value. */
11877 tree result_quo
11878 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11879 build_int_cst (TREE_TYPE (arg_quo),
11880 integer_quo));
11881 TREE_SIDE_EFFECTS (result_quo) = 1;
11882 /* Combine the quo assignment with the rem. */
11883 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11884 result_quo, result_rem));
11885 }
11886 }
11887 }
11888 }
11889 return result;
11890 }
11891
11892 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11893 resulting value as a tree with type TYPE. The mpfr precision is
11894 set to the precision of TYPE. We assume that this mpfr function
11895 returns zero if the result could be calculated exactly within the
11896 requested precision. In addition, the integer pointer represented
11897 by ARG_SG will be dereferenced and set to the appropriate signgam
11898 (-1,1) value. */
11899
11900 static tree
11901 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11902 {
11903 tree result = NULL_TREE;
11904
11905 STRIP_NOPS (arg);
11906
11907 /* To proceed, MPFR must exactly represent the target floating point
11908 format, which only happens when the target base equals two. Also
11909 verify ARG is a constant and that ARG_SG is an int pointer. */
11910 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11911 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11912 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11913 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11914 {
11915 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11916
11917 /* In addition to NaN and Inf, the argument cannot be zero or a
11918 negative integer. */
11919 if (real_isfinite (ra)
11920 && ra->cl != rvc_zero
11921 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11922 {
11923 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11924 const int prec = fmt->p;
11925 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11926 int inexact, sg;
11927 mpfr_t m;
11928 tree result_lg;
11929
11930 mpfr_init2 (m, prec);
11931 mpfr_from_real (m, ra, GMP_RNDN);
11932 mpfr_clear_flags ();
11933 inexact = mpfr_lgamma (m, &sg, m, rnd);
11934 result_lg = do_mpfr_ckconv (m, type, inexact);
11935 mpfr_clear (m);
11936 if (result_lg)
11937 {
11938 tree result_sg;
11939
11940 /* Dereference the arg_sg pointer argument. */
11941 arg_sg = build_fold_indirect_ref (arg_sg);
11942 /* Assign the signgam value into *arg_sg. */
11943 result_sg = fold_build2 (MODIFY_EXPR,
11944 TREE_TYPE (arg_sg), arg_sg,
11945 build_int_cst (TREE_TYPE (arg_sg), sg));
11946 TREE_SIDE_EFFECTS (result_sg) = 1;
11947 /* Combine the signgam assignment with the lgamma result. */
11948 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11949 result_sg, result_lg));
11950 }
11951 }
11952 }
11953
11954 return result;
11955 }
11956
11957 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11958 function FUNC on it and return the resulting value as a tree with
11959 type TYPE. The mpfr precision is set to the precision of TYPE. We
11960 assume that function FUNC returns zero if the result could be
11961 calculated exactly within the requested precision. */
11962
11963 static tree
11964 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11965 {
11966 tree result = NULL_TREE;
11967
11968 STRIP_NOPS (arg);
11969
11970 /* To proceed, MPFR must exactly represent the target floating point
11971 format, which only happens when the target base equals two. */
11972 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11973 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11974 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11975 {
11976 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11977 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11978
11979 if (real_isfinite (re) && real_isfinite (im))
11980 {
11981 const struct real_format *const fmt =
11982 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11983 const int prec = fmt->p;
11984 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11985 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11986 int inexact;
11987 mpc_t m;
11988
11989 mpc_init2 (m, prec);
11990 mpfr_from_real (mpc_realref (m), re, rnd);
11991 mpfr_from_real (mpc_imagref (m), im, rnd);
11992 mpfr_clear_flags ();
11993 inexact = func (m, m, crnd);
11994 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11995 mpc_clear (m);
11996 }
11997 }
11998
11999 return result;
12000 }
12001
12002 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12003 mpc function FUNC on it and return the resulting value as a tree
12004 with type TYPE. The mpfr precision is set to the precision of
12005 TYPE. We assume that function FUNC returns zero if the result
12006 could be calculated exactly within the requested precision. If
12007 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12008 in the arguments and/or results. */
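/* A representative call, as a sketch (this is how the cpow folding
   historically used this helper, with DO_NONFINITE driven by
   flag_unsafe_math_optimizations):

     do_mpc_arg2 (arg0, arg1, type, flag_unsafe_math_optimizations,
                  mpc_pow);  */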

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}

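/* Illustrative sketch: a constant folder for cpow could invoke
   do_mpc_arg2 along these lines; mpc_pow has the required
   int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t) signature,
   and the surrounding caller is hypothetical:

     tree folded = do_mpc_arg2 (arg0, arg1, type,
				flag_unsafe_math_optimizations, mpc_pow);
     if (folded)
       return folded;

   Passing a nonzero DO_NONFINITE lets arguments or results containing
   Inf/NaN fold as well, which is only safe when the caller has decided
   such folding is acceptable.  */
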
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, which would otherwise be
   caused by the call node being removed before the warning is
   generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call, point at a dummy so that ARGS is
	 never NULL; the folders only dereference it when NARGS > 0.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

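/* Illustrative sketch: a gimple-level caller holding a GIMPLE_CALL in
   STMT might use fold_call_stmt like this; the replacement step is
   elided because it depends on the caller's context:

     bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
     tree folded = fold_call_stmt (stmt, ignore);
     if (folded)
       ...  replace the call with FOLDED  ...

   Setting IGNORE for a value-less call allows more aggressive folding,
   since the result does not have to be materialized.  */
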
/* Look up the explicit builtin declaration (via builtin_decl_explicit)
   that corresponds to DECL and set ASMSPEC as its user assembler name.
   DECL must be a function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}

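/* Illustrative sketch: this function is reached when the user renames
   a builtin with an asm label, e.g.

     extern void *memcpy (void *, const void *, size_t)
       __asm__ ("__my_memcpy");

   After the call, both the block-move expansion path and the memcpy
   libfunc refer to __my_memcpy, so compiler-generated copies use the
   renamed entry point.  The identifier __my_memcpy is of course just
   an example.  */
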
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

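/* Illustrative sketch: a pass deciding whether a call statement is
   cheap enough to duplicate might ask (the surrounding heuristic is
   hypothetical):

     if (is_simple_builtin (gimple_call_fndecl (stmt)))
       ...  treat STMT as nearly free  ...

   gimple_call_fndecl returns NULL_TREE for indirect calls, which this
   predicate handles by returning false.  */
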
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
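
/* Illustrative sketch: inlining and unrolling heuristics can use the
   predicate above to avoid charging a full call cost for calls that
   expand to a handful of instructions.  A hypothetical cost model
   might read:

     int cost = is_inexpensive_builtin (callee_decl)
		? 1 : default_call_cost;

   CALLEE_DECL and DEFAULT_CALL_COST are placeholders for whatever the
   caller's cost model provides.  */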