Cache reals for 1/4, 1/6 and 1/9
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "predict.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "rtl.h"
32 #include "alias.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
36 #include "calls.h"
37 #include "varasm.h"
38 #include "tree-object-size.h"
39 #include "realmpfr.h"
40 #include "cfgrtl.h"
41 #include "internal-fn.h"
42 #include "flags.h"
43 #include "regs.h"
44 #include "except.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "emit-rtl.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "libfuncs.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
62 #include "tree-dfa.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
65 #include "builtins.h"
66 #include "asan.h"
67 #include "cilk.h"
68 #include "cgraph.h"
69 #include "tree-chkp.h"
70 #include "rtl-chkp.h"
71
72
73 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
74
75 struct target_builtins default_target_builtins;
76 #if SWITCHABLE_TARGET
77 struct target_builtins *this_target_builtins = &default_target_builtins;
78 #endif
79
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names[BUILT_IN_LAST]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names[(int) END_BUILTINS] =
86 {
87 #include "builtins.def"
88 };
89 #undef DEF_BUILTIN
90
91 /* Setup an array of builtin_info_type, make sure each element decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
94
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
97
98 static rtx c_readstr (const char *, machine_mode);
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static void expand_errno_check (tree, rtx);
112 static rtx expand_builtin_mathfn (tree, rtx, rtx);
113 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
114 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
115 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
116 static rtx expand_builtin_interclass_mathfn (tree, rtx);
117 static rtx expand_builtin_sincos (tree);
118 static rtx expand_builtin_cexpi (tree, rtx);
119 static rtx expand_builtin_int_roundingfn (tree, rtx);
120 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
121 static rtx expand_builtin_next_arg (void);
122 static rtx expand_builtin_va_start (tree);
123 static rtx expand_builtin_va_end (tree);
124 static rtx expand_builtin_va_copy (tree);
125 static rtx expand_builtin_strcmp (tree, rtx);
126 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
127 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
128 static rtx expand_builtin_memcpy (tree, rtx);
129 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
130 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
131 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
132 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
133 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
134 machine_mode, int, tree);
135 static rtx expand_builtin_strcpy (tree, rtx);
136 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
137 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree, bool);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
156 static bool validate_arg (const_tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (location_t, tree, tree);
162 static tree fold_builtin_cbrt (location_t, tree, tree);
163 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
165 static tree fold_builtin_cos (location_t, tree, tree, tree);
166 static tree fold_builtin_cosh (location_t, tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (location_t, tree, tree);
169 static tree fold_builtin_floor (location_t, tree, tree);
170 static tree fold_builtin_ceil (location_t, tree, tree);
171 static tree fold_builtin_round (location_t, tree, tree);
172 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_strchr (location_t, tree, tree, tree);
175 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
177 static tree fold_builtin_strcmp (location_t, tree, tree);
178 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
179 static tree fold_builtin_signbit (location_t, tree, tree);
180 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
181 static tree fold_builtin_isascii (location_t, tree);
182 static tree fold_builtin_toascii (location_t, tree);
183 static tree fold_builtin_isdigit (location_t, tree);
184 static tree fold_builtin_fabs (location_t, tree, tree);
185 static tree fold_builtin_abs (location_t, tree, tree);
186 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_0 (location_t, tree);
189 static tree fold_builtin_1 (location_t, tree, tree);
190 static tree fold_builtin_2 (location_t, tree, tree, tree);
191 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
192 static tree fold_builtin_varargs (location_t, tree, tree*, int);
193
194 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
195 static tree fold_builtin_strstr (location_t, tree, tree, tree);
196 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
197 static tree fold_builtin_strspn (location_t, tree, tree);
198 static tree fold_builtin_strcspn (location_t, tree, tree);
199
200 static rtx expand_builtin_object_size (tree);
201 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
202 enum built_in_function);
203 static void maybe_emit_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
205 static void maybe_emit_free_warning (tree);
206 static tree fold_builtin_object_size (tree, tree);
207
208 unsigned HOST_WIDE_INT target_newline;
209 unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 char target_percent_c[3];
213 char target_percent_s[3];
214 char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227 static void expand_builtin_sync_synchronize (void);
228
229 /* Return true if NAME starts with __builtin_ or __sync_. */
230
231 static bool
232 is_builtin_name (const char *name)
233 {
234 if (strncmp (name, "__builtin_", 10) == 0)
235 return true;
236 if (strncmp (name, "__sync_", 7) == 0)
237 return true;
238 if (strncmp (name, "__atomic_", 9) == 0)
239 return true;
240 if (flag_cilkplus
241 && (!strcmp (name, "__cilkrts_detach")
242 || !strcmp (name, "__cilkrts_pop_frame")))
243 return true;
244 return false;
245 }
246
247
248 /* Return true if DECL is a function symbol representing a built-in. */
249
250 bool
251 is_builtin_fn (tree decl)
252 {
253 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
254 }
255
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
259
260 static bool
261 called_as_built_in (tree node)
262 {
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
265 will have. */
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
268 }
269
270 /* Compute values M and N such that M divides (address of EXP - N) and such
271 that N < M. If these numbers can be determined, store M in alignp and N in
272 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
273 *alignp and any bit-offset to *bitposp.
274
275 Note that the address (and thus the alignment) computed here is based
276 on the address to which a symbol resolves, whereas DECL_ALIGN is based
277 on the address at which an object is actually located. These two
278 addresses are not always the same. For example, on ARM targets,
279 the address &foo of a Thumb function foo() has the lowest bit set,
280 whereas foo() itself starts on an even address.
281
282 If ADDR_P is true we are taking the address of the memory reference EXP
283 and thus cannot rely on the access taking place. */
284
285 static bool
286 get_object_alignment_2 (tree exp, unsigned int *alignp,
287 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
288 {
289 HOST_WIDE_INT bitsize, bitpos;
290 tree offset;
291 machine_mode mode;
292 int unsignedp, volatilep;
293 unsigned int align = BITS_PER_UNIT;
294 bool known_alignment = false;
295
296 /* Get the innermost object and the constant (bitpos) and possibly
297 variable (offset) offset of the access. */
298 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
299 &mode, &unsignedp, &volatilep, true);
300
301 /* Extract alignment information from the innermost object and
302 possibly adjust bitpos and offset. */
303 if (TREE_CODE (exp) == FUNCTION_DECL)
304 {
305 /* Function addresses can encode extra information besides their
306 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
307 allows the low bit to be used as a virtual bit, we know
308 that the address itself must be at least 2-byte aligned. */
309 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
310 align = 2 * BITS_PER_UNIT;
311 }
312 else if (TREE_CODE (exp) == LABEL_DECL)
313 ;
314 else if (TREE_CODE (exp) == CONST_DECL)
315 {
316 /* The alignment of a CONST_DECL is determined by its initializer. */
317 exp = DECL_INITIAL (exp);
318 align = TYPE_ALIGN (TREE_TYPE (exp));
319 if (CONSTANT_CLASS_P (exp))
320 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
321
322 known_alignment = true;
323 }
324 else if (DECL_P (exp))
325 {
326 align = DECL_ALIGN (exp);
327 known_alignment = true;
328 }
329 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
330 {
331 align = TYPE_ALIGN (TREE_TYPE (exp));
332 }
333 else if (TREE_CODE (exp) == INDIRECT_REF
334 || TREE_CODE (exp) == MEM_REF
335 || TREE_CODE (exp) == TARGET_MEM_REF)
336 {
337 tree addr = TREE_OPERAND (exp, 0);
338 unsigned ptr_align;
339 unsigned HOST_WIDE_INT ptr_bitpos;
340 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
341
342 /* If the address is explicitely aligned, handle that. */
343 if (TREE_CODE (addr) == BIT_AND_EXPR
344 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
345 {
346 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
347 ptr_bitmask *= BITS_PER_UNIT;
348 align = ptr_bitmask & -ptr_bitmask;
349 addr = TREE_OPERAND (addr, 0);
350 }
351
352 known_alignment
353 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
354 align = MAX (ptr_align, align);
355
356 /* Re-apply explicit alignment to the bitpos. */
357 ptr_bitpos &= ptr_bitmask;
358
359 /* The alignment of the pointer operand in a TARGET_MEM_REF
360 has to take the variable offset parts into account. */
361 if (TREE_CODE (exp) == TARGET_MEM_REF)
362 {
363 if (TMR_INDEX (exp))
364 {
365 unsigned HOST_WIDE_INT step = 1;
366 if (TMR_STEP (exp))
367 step = TREE_INT_CST_LOW (TMR_STEP (exp));
368 align = MIN (align, (step & -step) * BITS_PER_UNIT);
369 }
370 if (TMR_INDEX2 (exp))
371 align = BITS_PER_UNIT;
372 known_alignment = false;
373 }
374
375 /* When EXP is an actual memory reference then we can use
376 TYPE_ALIGN of a pointer indirection to derive alignment.
377 Do so only if get_pointer_alignment_1 did not reveal absolute
378 alignment knowledge and if using that alignment would
379 improve the situation. */
380 if (!addr_p && !known_alignment
381 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
382 align = TYPE_ALIGN (TREE_TYPE (exp));
383 else
384 {
385 /* Else adjust bitpos accordingly. */
386 bitpos += ptr_bitpos;
387 if (TREE_CODE (exp) == MEM_REF
388 || TREE_CODE (exp) == TARGET_MEM_REF)
389 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
390 }
391 }
392 else if (TREE_CODE (exp) == STRING_CST)
393 {
394 /* STRING_CST are the only constant objects we allow to be not
395 wrapped inside a CONST_DECL. */
396 align = TYPE_ALIGN (TREE_TYPE (exp));
397 if (CONSTANT_CLASS_P (exp))
398 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
399
400 known_alignment = true;
401 }
402
403 /* If there is a non-constant offset part extract the maximum
404 alignment that can prevail. */
405 if (offset)
406 {
407 unsigned int trailing_zeros = tree_ctz (offset);
408 if (trailing_zeros < HOST_BITS_PER_INT)
409 {
410 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
411 if (inner)
412 align = MIN (align, inner);
413 }
414 }
415
416 *alignp = align;
417 *bitposp = bitpos & (*alignp - 1);
418 return known_alignment;
419 }
420
421 /* For a memory reference expression EXP compute values M and N such that M
422 divides (&EXP - N) and such that N < M. If these numbers can be determined,
423 store M in alignp and N in *BITPOSP and return true. Otherwise return false
424 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
425
426 bool
427 get_object_alignment_1 (tree exp, unsigned int *alignp,
428 unsigned HOST_WIDE_INT *bitposp)
429 {
430 return get_object_alignment_2 (exp, alignp, bitposp, false);
431 }
432
433 /* Return the alignment in bits of EXP, an object. */
434
435 unsigned int
436 get_object_alignment (tree exp)
437 {
438 unsigned HOST_WIDE_INT bitpos = 0;
439 unsigned int align;
440
441 get_object_alignment_1 (exp, &align, &bitpos);
442
443 /* align and bitpos now specify known low bits of the pointer.
444 ptr & (align - 1) == bitpos. */
445
446 if (bitpos != 0)
447 align = (bitpos & -bitpos);
448 return align;
449 }
450
451 /* For a pointer valued expression EXP compute values M and N such that M
452 divides (EXP - N) and such that N < M. If these numbers can be determined,
453 store M in alignp and N in *BITPOSP and return true. Return false if
454 the results are just a conservative approximation.
455
456 If EXP is not a pointer, false is returned too. */
457
458 bool
459 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
460 unsigned HOST_WIDE_INT *bitposp)
461 {
462 STRIP_NOPS (exp);
463
464 if (TREE_CODE (exp) == ADDR_EXPR)
465 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
466 alignp, bitposp, true);
467 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
468 {
469 unsigned int align;
470 unsigned HOST_WIDE_INT bitpos;
471 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
472 &align, &bitpos);
473 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
474 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
475 else
476 {
477 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
478 if (trailing_zeros < HOST_BITS_PER_INT)
479 {
480 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
481 if (inner)
482 align = MIN (align, inner);
483 }
484 }
485 *alignp = align;
486 *bitposp = bitpos & (align - 1);
487 return res;
488 }
489 else if (TREE_CODE (exp) == SSA_NAME
490 && POINTER_TYPE_P (TREE_TYPE (exp)))
491 {
492 unsigned int ptr_align, ptr_misalign;
493 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
494
495 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
496 {
497 *bitposp = ptr_misalign * BITS_PER_UNIT;
498 *alignp = ptr_align * BITS_PER_UNIT;
499 /* We cannot really tell whether this result is an approximation. */
500 return true;
501 }
502 else
503 {
504 *bitposp = 0;
505 *alignp = BITS_PER_UNIT;
506 return false;
507 }
508 }
509 else if (TREE_CODE (exp) == INTEGER_CST)
510 {
511 *alignp = BIGGEST_ALIGNMENT;
512 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
513 & (BIGGEST_ALIGNMENT - 1));
514 return true;
515 }
516
517 *bitposp = 0;
518 *alignp = BITS_PER_UNIT;
519 return false;
520 }
521
522 /* Return the alignment in bits of EXP, a pointer valued expression.
523 The alignment returned is, by default, the alignment of the thing that
524 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
525
526 Otherwise, look at the expression to see if we can do better, i.e., if the
527 expression is actually pointing at an object whose alignment is tighter. */
528
529 unsigned int
530 get_pointer_alignment (tree exp)
531 {
532 unsigned HOST_WIDE_INT bitpos = 0;
533 unsigned int align;
534
535 get_pointer_alignment_1 (exp, &align, &bitpos);
536
537 /* align and bitpos now specify known low bits of the pointer.
538 ptr & (align - 1) == bitpos. */
539
540 if (bitpos != 0)
541 align = (bitpos & -bitpos);
542
543 return align;
544 }
545
546 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
547 way, because it could contain a zero byte in the middle.
548 TREE_STRING_LENGTH is the size of the character array, not the string.
549
550 ONLY_VALUE should be nonzero if the result is not going to be emitted
551 into the instruction stream and zero if it is going to be expanded.
552 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
553 is returned, otherwise NULL, since
554 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
555 evaluate the side-effects.
556
557 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
558 accesses. Note that this implies the result is not going to be emitted
559 into the instruction stream.
560
561 The value returned is of type `ssizetype'.
562
563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
565
566 tree
567 c_strlen (tree src, int only_value)
568 {
569 tree offset_node;
570 HOST_WIDE_INT offset;
571 int max;
572 const char *ptr;
573 location_t loc;
574
575 STRIP_NOPS (src);
576 if (TREE_CODE (src) == COND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
578 {
579 tree len1, len2;
580
581 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
582 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
583 if (tree_int_cst_equal (len1, len2))
584 return len1;
585 }
586
587 if (TREE_CODE (src) == COMPOUND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 return c_strlen (TREE_OPERAND (src, 1), only_value);
590
591 loc = EXPR_LOC_OR_LOC (src, input_location);
592
593 src = string_constant (src, &offset_node);
594 if (src == 0)
595 return NULL_TREE;
596
597 max = TREE_STRING_LENGTH (src) - 1;
598 ptr = TREE_STRING_POINTER (src);
599
600 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
601 {
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 int i;
606
607 for (i = 0; i < max; i++)
608 if (ptr[i] == 0)
609 return NULL_TREE;
610
611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
617
618 return size_diffop_loc (loc, size_int (max), offset_node);
619 }
620
621 /* We have a known offset into the string. Start searching there for
622 a null character if we can represent it as a single HOST_WIDE_INT. */
623 if (offset_node == 0)
624 offset = 0;
625 else if (! tree_fits_shwi_p (offset_node))
626 offset = -1;
627 else
628 offset = tree_to_shwi (offset_node);
629
630 /* If the offset is known to be out of bounds, warn, and call strlen at
631 runtime. */
632 if (offset < 0 || offset > max)
633 {
634 /* Suppress multiple warnings for propagated constant strings. */
635 if (only_value != 2
636 && !TREE_NO_WARNING (src))
637 {
638 warning_at (loc, 0, "offset outside bounds of constant string");
639 TREE_NO_WARNING (src) = 1;
640 }
641 return NULL_TREE;
642 }
643
644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
647
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
650 return ssize_int (strlen (ptr + offset));
651 }
652
653 /* Return a char pointer for a C string if it is a string constant
654 or sum of string constant and integer constant. */
655
656 const char *
657 c_getstr (tree src)
658 {
659 tree offset_node;
660
661 src = string_constant (src, &offset_node);
662 if (src == 0)
663 return 0;
664
665 if (offset_node == 0)
666 return TREE_STRING_POINTER (src);
667 else if (!tree_fits_uhwi_p (offset_node)
668 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
669 return 0;
670
671 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
672 }
673
674 /* Return a constant integer corresponding to target reading
675 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
676
677 static rtx
678 c_readstr (const char *str, machine_mode mode)
679 {
680 HOST_WIDE_INT ch;
681 unsigned int i, j;
682 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
683
684 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
685 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
686 / HOST_BITS_PER_WIDE_INT;
687
688 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
689 for (i = 0; i < len; i++)
690 tmp[i] = 0;
691
692 ch = 1;
693 for (i = 0; i < GET_MODE_SIZE (mode); i++)
694 {
695 j = i;
696 if (WORDS_BIG_ENDIAN)
697 j = GET_MODE_SIZE (mode) - i - 1;
698 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
699 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
700 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
701 j *= BITS_PER_UNIT;
702
703 if (ch)
704 ch = (unsigned char) str[i];
705 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
706 }
707
708 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
709 return immed_wide_int_const (c, mode);
710 }
711
712 /* Cast a target constant CST to target CHAR and if that value fits into
713 host char type, return zero and put that value into variable pointed to by
714 P. */
715
716 static int
717 target_char_cast (tree cst, char *p)
718 {
719 unsigned HOST_WIDE_INT val, hostval;
720
721 if (TREE_CODE (cst) != INTEGER_CST
722 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
723 return 1;
724
725 /* Do not care if it fits or not right here. */
726 val = TREE_INT_CST_LOW (cst);
727
728 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
729 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
730
731 hostval = val;
732 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
733 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
734
735 if (val != hostval)
736 return 1;
737
738 *p = hostval;
739 return 0;
740 }
741
742 /* Similar to save_expr, but assumes that arbitrary code is not executed
743 in between the multiple evaluations. In particular, we assume that a
744 non-addressable local variable will not be modified. */
745
746 static tree
747 builtin_save_expr (tree exp)
748 {
749 if (TREE_CODE (exp) == SSA_NAME
750 || (TREE_ADDRESSABLE (exp) == 0
751 && (TREE_CODE (exp) == PARM_DECL
752 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
753 return exp;
754
755 return save_expr (exp);
756 }
757
758 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
759 times to get the address of either a higher stack frame, or a return
760 address located within it (depending on FNDECL_CODE). */
761
762 static rtx
763 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
764 {
765 int i;
766
767 #ifdef INITIAL_FRAME_ADDRESS_RTX
768 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
769 #else
770 rtx tem;
771
772 /* For a zero count with __builtin_return_address, we don't care what
773 frame address we return, because target-specific definitions will
774 override us. Therefore frame pointer elimination is OK, and using
775 the soft frame pointer is OK.
776
777 For a nonzero count, or a zero count with __builtin_frame_address,
778 we require a stable offset from the current frame pointer to the
779 previous one, so we must use the hard frame pointer, and
780 we must disable frame pointer elimination. */
781 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
782 tem = frame_pointer_rtx;
783 else
784 {
785 tem = hard_frame_pointer_rtx;
786
787 /* Tell reload not to eliminate the frame pointer. */
788 crtl->accesses_prior_frames = 1;
789 }
790 #endif
791
792 /* Some machines need special handling before we can access
793 arbitrary frames. For example, on the SPARC, we must first flush
794 all register windows to the stack. */
795 #ifdef SETUP_FRAME_ADDRESSES
796 if (count > 0)
797 SETUP_FRAME_ADDRESSES ();
798 #endif
799
800 /* On the SPARC, the return address is not in the frame, it is in a
801 register. There is no way to access it off of the current frame
802 pointer, but it can be accessed off the previous frame pointer by
803 reading the value from the register window save area. */
804 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
805 count--;
806
807 /* Scan back COUNT frames to the specified frame. */
808 for (i = 0; i < count; i++)
809 {
810 /* Assume the dynamic chain pointer is in the word that the
811 frame address points to, unless otherwise specified. */
812 #ifdef DYNAMIC_CHAIN_ADDRESS
813 tem = DYNAMIC_CHAIN_ADDRESS (tem);
814 #endif
815 tem = memory_address (Pmode, tem);
816 tem = gen_frame_mem (Pmode, tem);
817 tem = copy_to_reg (tem);
818 }
819
820 /* For __builtin_frame_address, return what we've got. But, on
821 the SPARC for example, we may have to add a bias. */
822 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
823 #ifdef FRAME_ADDR_RTX
824 return FRAME_ADDR_RTX (tem);
825 #else
826 return tem;
827 #endif
828
829 /* For __builtin_return_address, get the return address from that frame. */
830 #ifdef RETURN_ADDR_RTX
831 tem = RETURN_ADDR_RTX (count, tem);
832 #else
833 tem = memory_address (Pmode,
834 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
835 tem = gen_frame_mem (Pmode, tem);
836 #endif
837 return tem;
838 }
839
840 /* Alias set used for setjmp buffer. */
841 static alias_set_type setjmp_alias_set = -1;
842
843 /* Construct the leading half of a __builtin_setjmp call. Control will
844 return to RECEIVER_LABEL. This is also called directly by the SJLJ
845 exception handling code. */
846
847 void
848 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
849 {
850 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
851 rtx stack_save;
852 rtx mem;
853
854 if (setjmp_alias_set == -1)
855 setjmp_alias_set = new_alias_set ();
856
857 buf_addr = convert_memory_address (Pmode, buf_addr);
858
859 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
860
861 /* We store the frame pointer and the address of receiver_label in
862 the buffer and use the rest of it for the stack save area, which
863 is machine-dependent. */
864
865 mem = gen_rtx_MEM (Pmode, buf_addr);
866 set_mem_alias_set (mem, setjmp_alias_set);
867 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
868
869 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
870 GET_MODE_SIZE (Pmode))),
871 set_mem_alias_set (mem, setjmp_alias_set);
872
873 emit_move_insn (validize_mem (mem),
874 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
875
876 stack_save = gen_rtx_MEM (sa_mode,
877 plus_constant (Pmode, buf_addr,
878 2 * GET_MODE_SIZE (Pmode)));
879 set_mem_alias_set (stack_save, setjmp_alias_set);
880 emit_stack_save (SAVE_NONLOCAL, &stack_save);
881
882 /* If there is further processing to do, do it. */
883 if (targetm.have_builtin_setjmp_setup ())
884 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
885
886 /* We have a nonlocal label. */
887 cfun->has_nonlocal_label = 1;
888 }
889
890 /* Construct the trailing part of a __builtin_setjmp call. This is
891 also called directly by the SJLJ exception handling code.
892 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
893
894 void
895 expand_builtin_setjmp_receiver (rtx receiver_label)
896 {
897 rtx chain;
898
899 /* Mark the FP as used when we get here, so we have to make sure it's
900 marked as used by this function. */
901 emit_use (hard_frame_pointer_rtx);
902
903 /* Mark the static chain as clobbered here so life information
904 doesn't get messed up for it. */
905 chain = targetm.calls.static_chain (current_function_decl, true);
906 if (chain && REG_P (chain))
907 emit_clobber (chain);
908
909 /* Now put in the code to restore the frame pointer, and argument
910 pointer, if needed. */
911 if (! targetm.have_nonlocal_goto ())
912 {
913 /* First adjust our frame pointer to its actual value. It was
914 previously set to the start of the virtual area corresponding to
915 the stacked variables when we branched here and now needs to be
916 adjusted to the actual hardware fp value.
917
918 Assignments to virtual registers are converted by
919 instantiate_virtual_regs into the corresponding assignment
920 to the underlying register (fp in this case) that makes
921 the original assignment true.
922 So the following insn will actually be decrementing fp by
923 STARTING_FRAME_OFFSET. */
924 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
925
926 /* Restoring the frame pointer also modifies the hard frame pointer.
927 Mark it used (so that the previous assignment remains live once
928 the frame pointer is eliminated) and clobbered (to represent the
929 implicit update from the assignment). */
930 emit_use (hard_frame_pointer_rtx);
931 emit_clobber (hard_frame_pointer_rtx);
932 }
933
934 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
935 if (fixed_regs[ARG_POINTER_REGNUM])
936 {
937 #ifdef ELIMINABLE_REGS
938 /* If the argument pointer can be eliminated in favor of the
939 frame pointer, we don't need to restore it. We assume here
940 that if such an elimination is present, it can always be used.
941 This is the case on all known machines; if we don't make this
942 assumption, we do unnecessary saving on many machines. */
943 size_t i;
944 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
945
946 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
947 if (elim_regs[i].from == ARG_POINTER_REGNUM
948 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
949 break;
950
951 if (i == ARRAY_SIZE (elim_regs))
952 #endif
953 {
954 /* Now restore our arg pointer from the address at which it
955 was saved in our stack frame. */
956 emit_move_insn (crtl->args.internal_arg_pointer,
957 copy_to_reg (get_arg_pointer_save_area ()));
958 }
959 }
960 #endif
961
962 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
963 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
964 else if (targetm.have_nonlocal_goto_receiver ())
965 emit_insn (targetm.gen_nonlocal_goto_receiver ());
966 else
967 { /* Nothing */ }
968
969 /* We must not allow the code we just generated to be reordered by
970 scheduling. Specifically, the update of the frame pointer must
971 happen immediately, not later. */
972 emit_insn (gen_blockage ());
973 }
974
975 /* __builtin_longjmp is passed a pointer to an array of five words (not
976 all will be used on all machines). It operates similarly to the C
977 library function of the same name, but is more efficient. Much of
978 the code below is copied from the handling of non-local gotos. */
979
980 static void
981 expand_builtin_longjmp (rtx buf_addr, rtx value)
982 {
983 rtx fp, lab, stack;
984 rtx_insn *insn, *last;
985 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
986
987 /* DRAP is needed for stack realign if longjmp is expanded to current
988 function */
989 if (SUPPORTS_STACK_ALIGNMENT)
990 crtl->need_drap = true;
991
992 if (setjmp_alias_set == -1)
993 setjmp_alias_set = new_alias_set ();
994
995 buf_addr = convert_memory_address (Pmode, buf_addr);
996
997 buf_addr = force_reg (Pmode, buf_addr);
998
999 /* We require that the user must pass a second argument of 1, because
1000 that is what builtin_setjmp will return. */
1001 gcc_assert (value == const1_rtx);
1002
1003 last = get_last_insn ();
1004 if (targetm.have_builtin_longjmp ())
1005 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1006 else
1007 {
1008 fp = gen_rtx_MEM (Pmode, buf_addr);
1009 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1010 GET_MODE_SIZE (Pmode)));
1011
1012 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1013 2 * GET_MODE_SIZE (Pmode)));
1014 set_mem_alias_set (fp, setjmp_alias_set);
1015 set_mem_alias_set (lab, setjmp_alias_set);
1016 set_mem_alias_set (stack, setjmp_alias_set);
1017
1018 /* Pick up FP, label, and SP from the block and jump. This code is
1019 from expand_goto in stmt.c; see there for detailed comments. */
1020 if (targetm.have_nonlocal_goto ())
1021 /* We have to pass a value to the nonlocal_goto pattern that will
1022 get copied into the static_chain pointer, but it does not matter
1023 what that value is, because builtin_setjmp does not use it. */
1024 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1025 else
1026 {
1027 lab = copy_to_reg (lab);
1028
1029 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1030 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1031
1032 emit_move_insn (hard_frame_pointer_rtx, fp);
1033 emit_stack_restore (SAVE_NONLOCAL, stack);
1034
1035 emit_use (hard_frame_pointer_rtx);
1036 emit_use (stack_pointer_rtx);
1037 emit_indirect_jump (lab);
1038 }
1039 }
1040
1041 /* Search backwards and mark the jump insn as a non-local goto.
1042 Note that this precludes the use of __builtin_longjmp to a
1043 __builtin_setjmp target in the same function. However, we've
1044 already cautioned the user that these functions are for
1045 internal exception handling use only. */
1046 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 {
1048 gcc_assert (insn != last);
1049
1050 if (JUMP_P (insn))
1051 {
1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1053 break;
1054 }
1055 else if (CALL_P (insn))
1056 break;
1057 }
1058 }
1059
1060 static inline bool
1061 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1062 {
1063 return (iter->i < iter->n);
1064 }
1065
1066 /* This function validates the types of a function call argument list
1067 against a specified list of tree_codes. If the last specifier is a 0,
1068 that represents an ellipses, otherwise the last specifier must be a
1069 VOID_TYPE. */
1070
1071 static bool
1072 validate_arglist (const_tree callexpr, ...)
1073 {
1074 enum tree_code code;
1075 bool res = 0;
1076 va_list ap;
1077 const_call_expr_arg_iterator iter;
1078 const_tree arg;
1079
1080 va_start (ap, callexpr);
1081 init_const_call_expr_arg_iterator (callexpr, &iter);
1082
1083 do
1084 {
1085 code = (enum tree_code) va_arg (ap, int);
1086 switch (code)
1087 {
1088 case 0:
1089 /* This signifies an ellipses, any further arguments are all ok. */
1090 res = true;
1091 goto end;
1092 case VOID_TYPE:
1093 /* This signifies an endlink, if no arguments remain, return
1094 true, otherwise return false. */
1095 res = !more_const_call_expr_args_p (&iter);
1096 goto end;
1097 default:
1098 /* If no parameters remain or the parameter's code does not
1099 match the specified code, return false. Otherwise continue
1100 checking any remaining arguments. */
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code))
1103 goto end;
1104 break;
1105 }
1106 }
1107 while (1);
1108
1109 /* We need gotos here since we can only have one VA_CLOSE in a
1110 function. */
1111 end: ;
1112 va_end (ap);
1113
1114 return res;
1115 }
1116
1117 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1118 and the address of the save area. */
1119
1120 static rtx
1121 expand_builtin_nonlocal_goto (tree exp)
1122 {
1123 tree t_label, t_save_area;
1124 rtx r_label, r_save_area, r_fp, r_sp;
1125 rtx_insn *insn;
1126
1127 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1128 return NULL_RTX;
1129
1130 t_label = CALL_EXPR_ARG (exp, 0);
1131 t_save_area = CALL_EXPR_ARG (exp, 1);
1132
1133 r_label = expand_normal (t_label);
1134 r_label = convert_memory_address (Pmode, r_label);
1135 r_save_area = expand_normal (t_save_area);
1136 r_save_area = convert_memory_address (Pmode, r_save_area);
1137 /* Copy the address of the save location to a register just in case it was
1138 based on the frame pointer. */
1139 r_save_area = copy_to_reg (r_save_area);
1140 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1141 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1142 plus_constant (Pmode, r_save_area,
1143 GET_MODE_SIZE (Pmode)));
1144
1145 crtl->has_nonlocal_goto = 1;
1146
1147 /* ??? We no longer need to pass the static chain value, afaik. */
1148 if (targetm.have_nonlocal_goto ())
1149 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1150 else
1151 {
1152 r_label = copy_to_reg (r_label);
1153
1154 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1155 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1156
1157 /* Restore frame pointer for containing function. */
1158 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1159 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1160
1161 /* USE of hard_frame_pointer_rtx added for consistency;
1162 not clear if really needed. */
1163 emit_use (hard_frame_pointer_rtx);
1164 emit_use (stack_pointer_rtx);
1165
1166 /* If the architecture is using a GP register, we must
1167 conservatively assume that the target function makes use of it.
1168 The prologue of functions with nonlocal gotos must therefore
1169 initialize the GP register to the appropriate value, and we
1170 must then make sure that this value is live at the point
1171 of the jump. (Note that this doesn't necessarily apply
1172 to targets with a nonlocal_goto pattern; they are free
1173 to implement it in their own way. Note also that this is
1174 a no-op if the GP register is a global invariant.) */
1175 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1176 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1177 emit_use (pic_offset_table_rtx);
1178
1179 emit_indirect_jump (r_label);
1180 }
1181
1182 /* Search backwards to the jump insn and mark it as a
1183 non-local goto. */
1184 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1185 {
1186 if (JUMP_P (insn))
1187 {
1188 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1189 break;
1190 }
1191 else if (CALL_P (insn))
1192 break;
1193 }
1194
1195 return const0_rtx;
1196 }
1197
1198 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1199 (not all will be used on all machines) that was passed to __builtin_setjmp.
1200 It updates the stack pointer in that block to the current value. This is
1201 also called directly by the SJLJ exception handling code. */
1202
1203 void
1204 expand_builtin_update_setjmp_buf (rtx buf_addr)
1205 {
1206 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1207 rtx stack_save
1208 = gen_rtx_MEM (sa_mode,
1209 memory_address
1210 (sa_mode,
1211 plus_constant (Pmode, buf_addr,
1212 2 * GET_MODE_SIZE (Pmode))));
1213
1214 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1215 }
1216
1217 /* Expand a call to __builtin_prefetch. For a target that does not support
1218 data prefetch, evaluate the memory address argument in case it has side
1219 effects. */
1220
1221 static void
1222 expand_builtin_prefetch (tree exp)
1223 {
1224 tree arg0, arg1, arg2;
1225 int nargs;
1226 rtx op0, op1, op2;
1227
1228 if (!validate_arglist (exp, POINTER_TYPE, 0))
1229 return;
1230
1231 arg0 = CALL_EXPR_ARG (exp, 0);
1232
1233 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1234 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1235 locality). */
1236 nargs = call_expr_nargs (exp);
1237 if (nargs > 1)
1238 arg1 = CALL_EXPR_ARG (exp, 1);
1239 else
1240 arg1 = integer_zero_node;
1241 if (nargs > 2)
1242 arg2 = CALL_EXPR_ARG (exp, 2);
1243 else
1244 arg2 = integer_three_node;
1245
1246 /* Argument 0 is an address. */
1247 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1248
1249 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1250 if (TREE_CODE (arg1) != INTEGER_CST)
1251 {
1252 error ("second argument to %<__builtin_prefetch%> must be a constant");
1253 arg1 = integer_zero_node;
1254 }
1255 op1 = expand_normal (arg1);
1256 /* Argument 1 must be either zero or one. */
1257 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1258 {
1259 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1260 " using zero");
1261 op1 = const0_rtx;
1262 }
1263
1264 /* Argument 2 (locality) must be a compile-time constant int. */
1265 if (TREE_CODE (arg2) != INTEGER_CST)
1266 {
1267 error ("third argument to %<__builtin_prefetch%> must be a constant");
1268 arg2 = integer_zero_node;
1269 }
1270 op2 = expand_normal (arg2);
1271 /* Argument 2 must be 0, 1, 2, or 3. */
1272 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1273 {
1274 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1275 op2 = const0_rtx;
1276 }
1277
1278 if (targetm.have_prefetch ())
1279 {
1280 struct expand_operand ops[3];
1281
1282 create_address_operand (&ops[0], op0);
1283 create_integer_operand (&ops[1], INTVAL (op1));
1284 create_integer_operand (&ops[2], INTVAL (op2));
1285 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1286 return;
1287 }
1288
1289 /* Don't do anything with direct references to volatile memory, but
1290 generate code to handle other side effects. */
1291 if (!MEM_P (op0) && side_effects_p (op0))
1292 emit_insn (op0);
1293 }
1294
1295 /* Get a MEM rtx for expression EXP which is the address of an operand
1296 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1297 the maximum length of the block of memory that might be accessed or
1298 NULL if unknown. */
1299
1300 static rtx
1301 get_memory_rtx (tree exp, tree len)
1302 {
1303 tree orig_exp = exp;
1304 rtx addr, mem;
1305
1306 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1307 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1308 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1309 exp = TREE_OPERAND (exp, 0);
1310
1311 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1312 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1313
1314 /* Get an expression we can use to find the attributes to assign to MEM.
1315 First remove any nops. */
1316 while (CONVERT_EXPR_P (exp)
1317 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1318 exp = TREE_OPERAND (exp, 0);
1319
1320 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1321 (as builtin stringops may alias with anything). */
1322 exp = fold_build2 (MEM_REF,
1323 build_array_type (char_type_node,
1324 build_range_type (sizetype,
1325 size_one_node, len)),
1326 exp, build_int_cst (ptr_type_node, 0));
1327
1328 /* If the MEM_REF has no acceptable address, try to get the base object
1329 from the original address we got, and build an all-aliasing
1330 unknown-sized access to that one. */
1331 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1332 set_mem_attributes (mem, exp, 0);
1333 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1334 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1335 0))))
1336 {
1337 exp = build_fold_addr_expr (exp);
1338 exp = fold_build2 (MEM_REF,
1339 build_array_type (char_type_node,
1340 build_range_type (sizetype,
1341 size_zero_node,
1342 NULL)),
1343 exp, build_int_cst (ptr_type_node, 0));
1344 set_mem_attributes (mem, exp, 0);
1345 }
1346 set_mem_alias_set (mem, 0);
1347 return mem;
1348 }
1349 \f
1350 /* Built-in functions to perform an untyped call and return. */
1351
1352 #define apply_args_mode \
1353 (this_target_builtins->x_apply_args_mode)
1354 #define apply_result_mode \
1355 (this_target_builtins->x_apply_result_mode)
1356
1357 /* Return the size required for the block returned by __builtin_apply_args,
1358 and initialize apply_args_mode. */
1359
1360 static int
1361 apply_args_size (void)
1362 {
1363 static int size = -1;
1364 int align;
1365 unsigned int regno;
1366 machine_mode mode;
1367
1368 /* The values computed by this function never change. */
1369 if (size < 0)
1370 {
1371 /* The first value is the incoming arg-pointer. */
1372 size = GET_MODE_SIZE (Pmode);
1373
1374 /* The second value is the structure value address unless this is
1375 passed as an "invisible" first argument. */
1376 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1377 size += GET_MODE_SIZE (Pmode);
1378
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if (FUNCTION_ARG_REGNO_P (regno))
1381 {
1382 mode = targetm.calls.get_raw_arg_mode (regno);
1383
1384 gcc_assert (mode != VOIDmode);
1385
1386 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1387 if (size % align != 0)
1388 size = CEIL (size, align) * align;
1389 size += GET_MODE_SIZE (mode);
1390 apply_args_mode[regno] = mode;
1391 }
1392 else
1393 {
1394 apply_args_mode[regno] = VOIDmode;
1395 }
1396 }
1397 return size;
1398 }
1399
1400 /* Return the size required for the block returned by __builtin_apply,
1401 and initialize apply_result_mode. */
1402
1403 static int
1404 apply_result_size (void)
1405 {
1406 static int size = -1;
1407 int align, regno;
1408 machine_mode mode;
1409
1410 /* The values computed by this function never change. */
1411 if (size < 0)
1412 {
1413 size = 0;
1414
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if (targetm.calls.function_value_regno_p (regno))
1417 {
1418 mode = targetm.calls.get_raw_result_mode (regno);
1419
1420 gcc_assert (mode != VOIDmode);
1421
1422 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1423 if (size % align != 0)
1424 size = CEIL (size, align) * align;
1425 size += GET_MODE_SIZE (mode);
1426 apply_result_mode[regno] = mode;
1427 }
1428 else
1429 apply_result_mode[regno] = VOIDmode;
1430
1431 /* Allow targets that use untyped_call and untyped_return to override
1432 the size so that machine-specific information can be stored here. */
1433 #ifdef APPLY_RESULT_SIZE
1434 size = APPLY_RESULT_SIZE;
1435 #endif
1436 }
1437 return size;
1438 }
1439
1440 /* Create a vector describing the result block RESULT. If SAVEP is true,
1441 the result block is used to save the values; otherwise it is used to
1442 restore the values. */
1443
1444 static rtx
1445 result_vector (int savep, rtx result)
1446 {
1447 int regno, size, align, nelts;
1448 machine_mode mode;
1449 rtx reg, mem;
1450 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1451
1452 size = nelts = 0;
1453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1454 if ((mode = apply_result_mode[regno]) != VOIDmode)
1455 {
1456 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1457 if (size % align != 0)
1458 size = CEIL (size, align) * align;
1459 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1460 mem = adjust_address (result, mode, size);
1461 savevec[nelts++] = (savep
1462 ? gen_rtx_SET (mem, reg)
1463 : gen_rtx_SET (reg, mem));
1464 size += GET_MODE_SIZE (mode);
1465 }
1466 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1467 }
1468
1469 /* Save the state required to perform an untyped call with the same
1470 arguments as were passed to the current function. */
1471
1472 static rtx
1473 expand_builtin_apply_args_1 (void)
1474 {
1475 rtx registers, tem;
1476 int size, align, regno;
1477 machine_mode mode;
1478 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1479
1480 /* Create a block where the arg-pointer, structure value address,
1481 and argument registers can be saved. */
1482 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1483
1484 /* Walk past the arg-pointer and structure value address. */
1485 size = GET_MODE_SIZE (Pmode);
1486 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1487 size += GET_MODE_SIZE (Pmode);
1488
1489 /* Save each register used in calling a function to the block. */
1490 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1491 if ((mode = apply_args_mode[regno]) != VOIDmode)
1492 {
1493 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1494 if (size % align != 0)
1495 size = CEIL (size, align) * align;
1496
1497 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1498
1499 emit_move_insn (adjust_address (registers, mode, size), tem);
1500 size += GET_MODE_SIZE (mode);
1501 }
1502
1503 /* Save the arg pointer to the block. */
1504 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1505 /* We need the pointer as the caller actually passed them to us, not
1506 as we might have pretended they were passed. Make sure it's a valid
1507 operand, as emit_move_insn isn't expected to handle a PLUS. */
1508 if (STACK_GROWS_DOWNWARD)
1509 tem
1510 = force_operand (plus_constant (Pmode, tem,
1511 crtl->args.pretend_args_size),
1512 NULL_RTX);
1513 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1514
1515 size = GET_MODE_SIZE (Pmode);
1516
1517 /* Save the structure value address unless this is passed as an
1518 "invisible" first argument. */
1519 if (struct_incoming_value)
1520 {
1521 emit_move_insn (adjust_address (registers, Pmode, size),
1522 copy_to_reg (struct_incoming_value));
1523 size += GET_MODE_SIZE (Pmode);
1524 }
1525
1526 /* Return the address of the block. */
1527 return copy_addr_to_reg (XEXP (registers, 0));
1528 }
1529
1530 /* __builtin_apply_args returns block of memory allocated on
1531 the stack into which is stored the arg pointer, structure
1532 value address, static chain, and all the registers that might
1533 possibly be used in performing a function call. The code is
1534 moved to the start of the function so the incoming values are
1535 saved. */
1536
1537 static rtx
1538 expand_builtin_apply_args (void)
1539 {
1540 /* Don't do __builtin_apply_args more than once in a function.
1541 Save the result of the first call and reuse it. */
1542 if (apply_args_value != 0)
1543 return apply_args_value;
1544 {
1545 /* When this function is called, it means that registers must be
1546 saved on entry to this function. So we migrate the
1547 call to the first insn of this function. */
1548 rtx temp;
1549
1550 start_sequence ();
1551 temp = expand_builtin_apply_args_1 ();
1552 rtx_insn *seq = get_insns ();
1553 end_sequence ();
1554
1555 apply_args_value = temp;
1556
1557 /* Put the insns after the NOTE that starts the function.
1558 If this is inside a start_sequence, make the outer-level insn
1559 chain current, so the code is placed at the start of the
1560 function. If internal_arg_pointer is a non-virtual pseudo,
1561 it needs to be placed after the function that initializes
1562 that pseudo. */
1563 push_topmost_sequence ();
1564 if (REG_P (crtl->args.internal_arg_pointer)
1565 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1566 emit_insn_before (seq, parm_birth_insn);
1567 else
1568 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1569 pop_topmost_sequence ();
1570 return temp;
1571 }
1572 }
1573
1574 /* Perform an untyped call and save the state required to perform an
1575 untyped return of whatever value was returned by the given function. */
1576
1577 static rtx
1578 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1579 {
1580 int size, align, regno;
1581 machine_mode mode;
1582 rtx incoming_args, result, reg, dest, src;
1583 rtx_call_insn *call_insn;
1584 rtx old_stack_level = 0;
1585 rtx call_fusage = 0;
1586 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1587
1588 arguments = convert_memory_address (Pmode, arguments);
1589
1590 /* Create a block where the return registers can be saved. */
1591 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592
1593 /* Fetch the arg pointer from the ARGUMENTS block. */
1594 incoming_args = gen_reg_rtx (Pmode);
1595 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1596 if (!STACK_GROWS_DOWNWARD)
1597 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1598 incoming_args, 0, OPTAB_LIB_WIDEN);
1599
1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
1603 do_pending_stack_adjust ();
1604 NO_DEFER_POP;
1605
1606 /* Save the stack with nonlocal if available. */
1607 if (targetm.have_save_stack_nonlocal ())
1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1609 else
1610 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1611
1612 /* Allocate a block of memory onto the stack and copy the memory
1613 arguments to the outgoing arguments address. We can pass TRUE
1614 as the 4th argument because we just saved the stack pointer
1615 and will restore it right after the call. */
1616 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1617
1618 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1619 may have already set current_function_calls_alloca to true.
1620 current_function_calls_alloca won't be set if argsize is zero,
1621 so we have to guarantee need_drap is true here. */
1622 if (SUPPORTS_STACK_ALIGNMENT)
1623 crtl->need_drap = true;
1624
1625 dest = virtual_outgoing_args_rtx;
1626 if (!STACK_GROWS_DOWNWARD)
1627 {
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 }
1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1638
1639 /* Refer to the argument block. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
1642 set_mem_align (arguments, PARM_BOUNDARY);
1643
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
1646 if (struct_value)
1647 size += GET_MODE_SIZE (Pmode);
1648
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 {
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1661 }
1662
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1667 {
1668 rtx value = gen_reg_rtx (Pmode);
1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1670 emit_move_insn (struct_value, value);
1671 if (REG_P (struct_value))
1672 use_reg (&call_fusage, struct_value);
1673 size += GET_MODE_SIZE (Pmode);
1674 }
1675
1676 /* All arguments and registers used for the call are set up by now! */
1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1678
1679 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1680 and we don't want to load it into a register as an optimization,
1681 because prepare_call_address already did it if it should be done. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1684
1685 /* Generate the actual call instruction and save the return value. */
1686 if (targetm.have_untyped_call ())
1687 {
1688 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1689 emit_call_insn (targetm.gen_untyped_call (mem, result,
1690 result_vector (1, result)));
1691 }
1692 else if (targetm.have_call_value ())
1693 {
1694 rtx valreg = 0;
1695
1696 /* Locate the unique return register. It is not possible to
1697 express a call that sets more than one return register using
1698 call_value; use untyped_call for that. In fact, untyped_call
1699 only needs to save the return registers in the given block. */
1700 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1701 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 {
1703 gcc_assert (!valreg); /* have_untyped_call required. */
1704
1705 valreg = gen_rtx_REG (mode, regno);
1706 }
1707
1708 emit_insn (targetm.gen_call_value (valreg,
1709 gen_rtx_MEM (FUNCTION_MODE, function),
1710 const0_rtx, NULL_RTX, const0_rtx));
1711
1712 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 }
1714 else
1715 gcc_unreachable ();
1716
1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
1721
1722 /* Restore the stack. */
1723 if (targetm.have_save_stack_nonlocal ())
1724 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1725 else
1726 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1727 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1728
1729 OK_DEFER_POP;
1730
1731 /* Return the address of the result block. */
1732 result = copy_addr_to_reg (XEXP (result, 0));
1733 return convert_memory_address (ptr_mode, result);
1734 }
1735
1736 /* Perform an untyped return. */
1737
1738 static void
1739 expand_builtin_return (rtx result)
1740 {
1741 int size, align, regno;
1742 machine_mode mode;
1743 rtx reg;
1744 rtx_insn *call_fusage = 0;
1745
1746 result = convert_memory_address (Pmode, result);
1747
1748 apply_result_size ();
1749 result = gen_rtx_MEM (BLKmode, result);
1750
1751 if (targetm.have_untyped_return ())
1752 {
1753 rtx vector = result_vector (0, result);
1754 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1755 emit_barrier ();
1756 return;
1757 }
1758
1759 /* Restore the return value and note that each value is used. */
1760 size = 0;
1761 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1762 if ((mode = apply_result_mode[regno]) != VOIDmode)
1763 {
1764 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1765 if (size % align != 0)
1766 size = CEIL (size, align) * align;
1767 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1768 emit_move_insn (reg, adjust_address (result, mode, size));
1769
1770 push_to_sequence (call_fusage);
1771 emit_use (reg);
1772 call_fusage = get_insns ();
1773 end_sequence ();
1774 size += GET_MODE_SIZE (mode);
1775 }
1776
1777 /* Put the USE insns before the return. */
1778 emit_insn (call_fusage);
1779
1780 /* Return whatever values was restored by jumping directly to the end
1781 of the function. */
1782 expand_naked_return ();
1783 }
1784
1785 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1786
1787 static enum type_class
1788 type_to_class (tree type)
1789 {
1790 switch (TREE_CODE (type))
1791 {
1792 case VOID_TYPE: return void_type_class;
1793 case INTEGER_TYPE: return integer_type_class;
1794 case ENUMERAL_TYPE: return enumeral_type_class;
1795 case BOOLEAN_TYPE: return boolean_type_class;
1796 case POINTER_TYPE: return pointer_type_class;
1797 case REFERENCE_TYPE: return reference_type_class;
1798 case OFFSET_TYPE: return offset_type_class;
1799 case REAL_TYPE: return real_type_class;
1800 case COMPLEX_TYPE: return complex_type_class;
1801 case FUNCTION_TYPE: return function_type_class;
1802 case METHOD_TYPE: return method_type_class;
1803 case RECORD_TYPE: return record_type_class;
1804 case UNION_TYPE:
1805 case QUAL_UNION_TYPE: return union_type_class;
1806 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1807 ? string_type_class : array_type_class);
1808 case LANG_TYPE: return lang_type_class;
1809 default: return no_type_class;
1810 }
1811 }
1812
1813 /* Expand a call EXP to __builtin_classify_type. */
1814
1815 static rtx
1816 expand_builtin_classify_type (tree exp)
1817 {
1818 if (call_expr_nargs (exp))
1819 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1820 return GEN_INT (no_type_class);
1821 }
1822
1823 /* This helper macro, meant to be used in mathfn_built_in below,
1824 determines which among a set of three builtin math functions is
1825 appropriate for a given type mode. The `F' and `L' cases are
1826 automatically generated from the `double' case. */
1827 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1828 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1829 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1830 fcodel = BUILT_IN_MATHFN##L ; break;
1831 /* Similar to above, but appends _R after any F/L suffix. */
1832 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1834 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1835 fcodel = BUILT_IN_MATHFN##L_R ; break;
1836
1837 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1838 if available. If IMPLICIT is true use the implicit builtin declaration,
1839 otherwise use the explicit declaration. If we can't do the conversion,
1840 return zero. */
1841
1842 static tree
1843 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1844 {
1845 enum built_in_function fcode, fcodef, fcodel, fcode2;
1846
1847 switch (fn)
1848 {
1849 CASE_MATHFN (BUILT_IN_ACOS)
1850 CASE_MATHFN (BUILT_IN_ACOSH)
1851 CASE_MATHFN (BUILT_IN_ASIN)
1852 CASE_MATHFN (BUILT_IN_ASINH)
1853 CASE_MATHFN (BUILT_IN_ATAN)
1854 CASE_MATHFN (BUILT_IN_ATAN2)
1855 CASE_MATHFN (BUILT_IN_ATANH)
1856 CASE_MATHFN (BUILT_IN_CBRT)
1857 CASE_MATHFN (BUILT_IN_CEIL)
1858 CASE_MATHFN (BUILT_IN_CEXPI)
1859 CASE_MATHFN (BUILT_IN_COPYSIGN)
1860 CASE_MATHFN (BUILT_IN_COS)
1861 CASE_MATHFN (BUILT_IN_COSH)
1862 CASE_MATHFN (BUILT_IN_DREM)
1863 CASE_MATHFN (BUILT_IN_ERF)
1864 CASE_MATHFN (BUILT_IN_ERFC)
1865 CASE_MATHFN (BUILT_IN_EXP)
1866 CASE_MATHFN (BUILT_IN_EXP10)
1867 CASE_MATHFN (BUILT_IN_EXP2)
1868 CASE_MATHFN (BUILT_IN_EXPM1)
1869 CASE_MATHFN (BUILT_IN_FABS)
1870 CASE_MATHFN (BUILT_IN_FDIM)
1871 CASE_MATHFN (BUILT_IN_FLOOR)
1872 CASE_MATHFN (BUILT_IN_FMA)
1873 CASE_MATHFN (BUILT_IN_FMAX)
1874 CASE_MATHFN (BUILT_IN_FMIN)
1875 CASE_MATHFN (BUILT_IN_FMOD)
1876 CASE_MATHFN (BUILT_IN_FREXP)
1877 CASE_MATHFN (BUILT_IN_GAMMA)
1878 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1879 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1880 CASE_MATHFN (BUILT_IN_HYPOT)
1881 CASE_MATHFN (BUILT_IN_ILOGB)
1882 CASE_MATHFN (BUILT_IN_ICEIL)
1883 CASE_MATHFN (BUILT_IN_IFLOOR)
1884 CASE_MATHFN (BUILT_IN_INF)
1885 CASE_MATHFN (BUILT_IN_IRINT)
1886 CASE_MATHFN (BUILT_IN_IROUND)
1887 CASE_MATHFN (BUILT_IN_ISINF)
1888 CASE_MATHFN (BUILT_IN_J0)
1889 CASE_MATHFN (BUILT_IN_J1)
1890 CASE_MATHFN (BUILT_IN_JN)
1891 CASE_MATHFN (BUILT_IN_LCEIL)
1892 CASE_MATHFN (BUILT_IN_LDEXP)
1893 CASE_MATHFN (BUILT_IN_LFLOOR)
1894 CASE_MATHFN (BUILT_IN_LGAMMA)
1895 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1896 CASE_MATHFN (BUILT_IN_LLCEIL)
1897 CASE_MATHFN (BUILT_IN_LLFLOOR)
1898 CASE_MATHFN (BUILT_IN_LLRINT)
1899 CASE_MATHFN (BUILT_IN_LLROUND)
1900 CASE_MATHFN (BUILT_IN_LOG)
1901 CASE_MATHFN (BUILT_IN_LOG10)
1902 CASE_MATHFN (BUILT_IN_LOG1P)
1903 CASE_MATHFN (BUILT_IN_LOG2)
1904 CASE_MATHFN (BUILT_IN_LOGB)
1905 CASE_MATHFN (BUILT_IN_LRINT)
1906 CASE_MATHFN (BUILT_IN_LROUND)
1907 CASE_MATHFN (BUILT_IN_MODF)
1908 CASE_MATHFN (BUILT_IN_NAN)
1909 CASE_MATHFN (BUILT_IN_NANS)
1910 CASE_MATHFN (BUILT_IN_NEARBYINT)
1911 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1912 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1913 CASE_MATHFN (BUILT_IN_POW)
1914 CASE_MATHFN (BUILT_IN_POWI)
1915 CASE_MATHFN (BUILT_IN_POW10)
1916 CASE_MATHFN (BUILT_IN_REMAINDER)
1917 CASE_MATHFN (BUILT_IN_REMQUO)
1918 CASE_MATHFN (BUILT_IN_RINT)
1919 CASE_MATHFN (BUILT_IN_ROUND)
1920 CASE_MATHFN (BUILT_IN_SCALB)
1921 CASE_MATHFN (BUILT_IN_SCALBLN)
1922 CASE_MATHFN (BUILT_IN_SCALBN)
1923 CASE_MATHFN (BUILT_IN_SIGNBIT)
1924 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1925 CASE_MATHFN (BUILT_IN_SIN)
1926 CASE_MATHFN (BUILT_IN_SINCOS)
1927 CASE_MATHFN (BUILT_IN_SINH)
1928 CASE_MATHFN (BUILT_IN_SQRT)
1929 CASE_MATHFN (BUILT_IN_TAN)
1930 CASE_MATHFN (BUILT_IN_TANH)
1931 CASE_MATHFN (BUILT_IN_TGAMMA)
1932 CASE_MATHFN (BUILT_IN_TRUNC)
1933 CASE_MATHFN (BUILT_IN_Y0)
1934 CASE_MATHFN (BUILT_IN_Y1)
1935 CASE_MATHFN (BUILT_IN_YN)
1936
1937 default:
1938 return NULL_TREE;
1939 }
1940
1941 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1942 fcode2 = fcode;
1943 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1944 fcode2 = fcodef;
1945 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1946 fcode2 = fcodel;
1947 else
1948 return NULL_TREE;
1949
1950 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1951 return NULL_TREE;
1952
1953 return builtin_decl_explicit (fcode2);
1954 }
1955
1956 /* Like mathfn_built_in_1(), but always use the implicit array. */
1957
1958 tree
1959 mathfn_built_in (tree type, enum built_in_function fn)
1960 {
1961 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1962 }
1963
1964 /* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1967
1968 static void
1969 expand_errno_check (tree exp, rtx target)
1970 {
1971 rtx_code_label *lab = gen_label_rtx ();
1972
1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1976 NULL_RTX, NULL, lab,
1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1979
1980 #ifdef TARGET_EDOM
1981 /* If this built-in doesn't throw an exception, set errno directly. */
1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1983 {
1984 #ifdef GEN_ERRNO_RTX
1985 rtx errno_rtx = GEN_ERRNO_RTX;
1986 #else
1987 rtx errno_rtx
1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989 #endif
1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1992 emit_label (lab);
1993 return;
1994 }
1995 #endif
1996
1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
1999
2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
2006 }
2007
2008 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
2013
2014 static rtx
2015 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2016 {
2017 optab builtin_optab;
2018 rtx op0;
2019 rtx_insn *insns;
2020 tree fndecl = get_callee_fndecl (exp);
2021 machine_mode mode;
2022 bool errno_set = false;
2023 bool try_widening = false;
2024 tree arg;
2025
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2028
2029 arg = CALL_EXPR_ARG (exp, 0);
2030
2031 switch (DECL_FUNCTION_CODE (fndecl))
2032 {
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 try_widening = true;
2036 builtin_optab = sqrt_optab;
2037 break;
2038 CASE_FLT_FN (BUILT_IN_EXP):
2039 errno_set = true; builtin_optab = exp_optab; break;
2040 CASE_FLT_FN (BUILT_IN_EXP10):
2041 CASE_FLT_FN (BUILT_IN_POW10):
2042 errno_set = true; builtin_optab = exp10_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP2):
2044 errno_set = true; builtin_optab = exp2_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXPM1):
2046 errno_set = true; builtin_optab = expm1_optab; break;
2047 CASE_FLT_FN (BUILT_IN_LOGB):
2048 errno_set = true; builtin_optab = logb_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOG):
2050 errno_set = true; builtin_optab = log_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG10):
2052 errno_set = true; builtin_optab = log10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG2):
2054 errno_set = true; builtin_optab = log2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG1P):
2056 errno_set = true; builtin_optab = log1p_optab; break;
2057 CASE_FLT_FN (BUILT_IN_ASIN):
2058 builtin_optab = asin_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ACOS):
2060 builtin_optab = acos_optab; break;
2061 CASE_FLT_FN (BUILT_IN_TAN):
2062 builtin_optab = tan_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ATAN):
2064 builtin_optab = atan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_FLOOR):
2066 builtin_optab = floor_optab; break;
2067 CASE_FLT_FN (BUILT_IN_CEIL):
2068 builtin_optab = ceil_optab; break;
2069 CASE_FLT_FN (BUILT_IN_TRUNC):
2070 builtin_optab = btrunc_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ROUND):
2072 builtin_optab = round_optab; break;
2073 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2074 builtin_optab = nearbyint_optab;
2075 if (flag_trapping_math)
2076 break;
2077 /* Else fallthrough and expand as rint. */
2078 CASE_FLT_FN (BUILT_IN_RINT):
2079 builtin_optab = rint_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2081 builtin_optab = significand_optab; break;
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2088
2089 if (! flag_errno_math || ! HONOR_NANS (mode))
2090 errno_set = false;
2091
2092 /* Before working hard, check whether the instruction is available, but try
2093 to widen the mode for specific operations. */
2094 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2095 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2096 && (!errno_set || !optimize_insn_for_size_p ()))
2097 {
2098 rtx result = gen_reg_rtx (mode);
2099
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102 side-effects more the once. */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2104
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2106
2107 start_sequence ();
2108
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 result = expand_unop (mode, builtin_optab, op0, result, 0);
2112
2113 if (result != 0)
2114 {
2115 if (errno_set)
2116 expand_errno_check (exp, result);
2117
2118 /* Output the entire sequence. */
2119 insns = get_insns ();
2120 end_sequence ();
2121 emit_insn (insns);
2122 return result;
2123 }
2124
2125 /* If we were unable to expand via the builtin, stop the sequence
2126 (without outputting the insns) and call to the library function
2127 with the stabilized argument list. */
2128 end_sequence ();
2129 }
2130
2131 return expand_call (exp, target, target == const0_rtx);
2132 }
2133
2134 /* Expand a call to the builtin binary math functions (pow and atan2).
2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2140
2141 static rtx
2142 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2143 {
2144 optab builtin_optab;
2145 rtx op0, op1, result;
2146 rtx_insn *insns;
2147 int op1_type = REAL_TYPE;
2148 tree fndecl = get_callee_fndecl (exp);
2149 tree arg0, arg1;
2150 machine_mode mode;
2151 bool errno_set = true;
2152
2153 switch (DECL_FUNCTION_CODE (fndecl))
2154 {
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 CASE_FLT_FN (BUILT_IN_LDEXP):
2158 op1_type = INTEGER_TYPE;
2159 default:
2160 break;
2161 }
2162
2163 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2164 return NULL_RTX;
2165
2166 arg0 = CALL_EXPR_ARG (exp, 0);
2167 arg1 = CALL_EXPR_ARG (exp, 1);
2168
2169 switch (DECL_FUNCTION_CODE (fndecl))
2170 {
2171 CASE_FLT_FN (BUILT_IN_POW):
2172 builtin_optab = pow_optab; break;
2173 CASE_FLT_FN (BUILT_IN_ATAN2):
2174 builtin_optab = atan2_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALB):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2177 return 0;
2178 builtin_optab = scalb_optab; break;
2179 CASE_FLT_FN (BUILT_IN_SCALBN):
2180 CASE_FLT_FN (BUILT_IN_SCALBLN):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 /* Fall through... */
2184 CASE_FLT_FN (BUILT_IN_LDEXP):
2185 builtin_optab = ldexp_optab; break;
2186 CASE_FLT_FN (BUILT_IN_FMOD):
2187 builtin_optab = fmod_optab; break;
2188 CASE_FLT_FN (BUILT_IN_REMAINDER):
2189 CASE_FLT_FN (BUILT_IN_DREM):
2190 builtin_optab = remainder_optab; break;
2191 default:
2192 gcc_unreachable ();
2193 }
2194
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2197
2198 /* Before working hard, check whether the instruction is available. */
2199 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2200 return NULL_RTX;
2201
2202 result = gen_reg_rtx (mode);
2203
2204 if (! flag_errno_math || ! HONOR_NANS (mode))
2205 errno_set = false;
2206
2207 if (errno_set && optimize_insn_for_size_p ())
2208 return 0;
2209
2210 /* Always stabilize the argument list. */
2211 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2212 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2213
2214 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2215 op1 = expand_normal (arg1);
2216
2217 start_sequence ();
2218
2219 /* Compute into RESULT.
2220 Set RESULT to wherever the result comes back. */
2221 result = expand_binop (mode, builtin_optab, op0, op1,
2222 result, 0, OPTAB_DIRECT);
2223
2224 /* If we were unable to expand via the builtin, stop the sequence
2225 (without outputting the insns) and call to the library function
2226 with the stabilized argument list. */
2227 if (result == 0)
2228 {
2229 end_sequence ();
2230 return expand_call (exp, target, target == const0_rtx);
2231 }
2232
2233 if (errno_set)
2234 expand_errno_check (exp, result);
2235
2236 /* Output the entire sequence. */
2237 insns = get_insns ();
2238 end_sequence ();
2239 emit_insn (insns);
2240
2241 return result;
2242 }
2243
2244 /* Expand a call to the builtin trinary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2250
2251 static rtx
2252 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2253 {
2254 optab builtin_optab;
2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
2259 machine_mode mode;
2260
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2263
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2267
2268 switch (DECL_FUNCTION_CODE (fndecl))
2269 {
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 builtin_optab = fma_optab; break;
2272 default:
2273 gcc_unreachable ();
2274 }
2275
2276 /* Make a suitable register to place result in. */
2277 mode = TYPE_MODE (TREE_TYPE (exp));
2278
2279 /* Before working hard, check whether the instruction is available. */
2280 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2281 return NULL_RTX;
2282
2283 result = gen_reg_rtx (mode);
2284
2285 /* Always stabilize the argument list. */
2286 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2287 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2288 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2289
2290 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2291 op1 = expand_normal (arg1);
2292 op2 = expand_normal (arg2);
2293
2294 start_sequence ();
2295
2296 /* Compute into RESULT.
2297 Set RESULT to wherever the result comes back. */
2298 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2299 result, 0);
2300
2301 /* If we were unable to expand via the builtin, stop the sequence
2302 (without outputting the insns) and call to the library function
2303 with the stabilized argument list. */
2304 if (result == 0)
2305 {
2306 end_sequence ();
2307 return expand_call (exp, target, target == const0_rtx);
2308 }
2309
2310 /* Output the entire sequence. */
2311 insns = get_insns ();
2312 end_sequence ();
2313 emit_insn (insns);
2314
2315 return result;
2316 }
2317
2318 /* Expand a call to the builtin sin and cos math functions.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET.
2322 SUBTARGET may be used as the target for computing one of EXP's
2323 operands. */
2324
2325 static rtx
2326 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2327 {
2328 optab builtin_optab;
2329 rtx op0;
2330 rtx_insn *insns;
2331 tree fndecl = get_callee_fndecl (exp);
2332 machine_mode mode;
2333 tree arg;
2334
2335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2336 return NULL_RTX;
2337
2338 arg = CALL_EXPR_ARG (exp, 0);
2339
2340 switch (DECL_FUNCTION_CODE (fndecl))
2341 {
2342 CASE_FLT_FN (BUILT_IN_SIN):
2343 CASE_FLT_FN (BUILT_IN_COS):
2344 builtin_optab = sincos_optab; break;
2345 default:
2346 gcc_unreachable ();
2347 }
2348
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (exp));
2351
2352 /* Check if sincos insn is available, otherwise fallback
2353 to sin or cos insn. */
2354 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2355 switch (DECL_FUNCTION_CODE (fndecl))
2356 {
2357 CASE_FLT_FN (BUILT_IN_SIN):
2358 builtin_optab = sin_optab; break;
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = cos_optab; break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364
2365 /* Before working hard, check whether the instruction is available. */
2366 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2367 {
2368 rtx result = gen_reg_rtx (mode);
2369
2370 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2371 need to expand the argument again. This way, we will not perform
2372 side-effects more the once. */
2373 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2374
2375 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2376
2377 start_sequence ();
2378
2379 /* Compute into RESULT.
2380 Set RESULT to wherever the result comes back. */
2381 if (builtin_optab == sincos_optab)
2382 {
2383 int ok;
2384
2385 switch (DECL_FUNCTION_CODE (fndecl))
2386 {
2387 CASE_FLT_FN (BUILT_IN_SIN):
2388 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2389 break;
2390 CASE_FLT_FN (BUILT_IN_COS):
2391 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2392 break;
2393 default:
2394 gcc_unreachable ();
2395 }
2396 gcc_assert (ok);
2397 }
2398 else
2399 result = expand_unop (mode, builtin_optab, op0, result, 0);
2400
2401 if (result != 0)
2402 {
2403 /* Output the entire sequence. */
2404 insns = get_insns ();
2405 end_sequence ();
2406 emit_insn (insns);
2407 return result;
2408 }
2409
2410 /* If we were unable to expand via the builtin, stop the sequence
2411 (without outputting the insns) and call to the library function
2412 with the stabilized argument list. */
2413 end_sequence ();
2414 }
2415
2416 return expand_call (exp, target, target == const0_rtx);
2417 }
2418
2419 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2420 return an RTL instruction code that implements the functionality.
2421 If that isn't possible or available return CODE_FOR_nothing. */
2422
2423 static enum insn_code
2424 interclass_mathfn_icode (tree arg, tree fndecl)
2425 {
2426 bool errno_set = false;
2427 optab builtin_optab = unknown_optab;
2428 machine_mode mode;
2429
2430 switch (DECL_FUNCTION_CODE (fndecl))
2431 {
2432 CASE_FLT_FN (BUILT_IN_ILOGB):
2433 errno_set = true; builtin_optab = ilogb_optab; break;
2434 CASE_FLT_FN (BUILT_IN_ISINF):
2435 builtin_optab = isinf_optab; break;
2436 case BUILT_IN_ISNORMAL:
2437 case BUILT_IN_ISFINITE:
2438 CASE_FLT_FN (BUILT_IN_FINITE):
2439 case BUILT_IN_FINITED32:
2440 case BUILT_IN_FINITED64:
2441 case BUILT_IN_FINITED128:
2442 case BUILT_IN_ISINFD32:
2443 case BUILT_IN_ISINFD64:
2444 case BUILT_IN_ISINFD128:
2445 /* These builtins have no optabs (yet). */
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450
2451 /* There's no easy way to detect the case we need to set EDOM. */
2452 if (flag_errno_math && errno_set)
2453 return CODE_FOR_nothing;
2454
2455 /* Optab mode depends on the mode of the input argument. */
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2457
2458 if (builtin_optab)
2459 return optab_handler (builtin_optab, mode);
2460 return CODE_FOR_nothing;
2461 }
2462
2463 /* Expand a call to one of the builtin math functions that operate on
2464 floating point argument and output an integer result (ilogb, isinf,
2465 isnan, etc).
2466 Return 0 if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
2468 function; if convenient, the result should be placed in TARGET. */
2469
2470 static rtx
2471 expand_builtin_interclass_mathfn (tree exp, rtx target)
2472 {
2473 enum insn_code icode = CODE_FOR_nothing;
2474 rtx op0;
2475 tree fndecl = get_callee_fndecl (exp);
2476 machine_mode mode;
2477 tree arg;
2478
2479 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2480 return NULL_RTX;
2481
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 icode = interclass_mathfn_icode (arg, fndecl);
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2485
2486 if (icode != CODE_FOR_nothing)
2487 {
2488 struct expand_operand ops[1];
2489 rtx_insn *last = get_last_insn ();
2490 tree orig_arg = arg;
2491
2492 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2493 need to expand the argument again. This way, we will not perform
2494 side-effects more the once. */
2495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2496
2497 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2498
2499 if (mode != GET_MODE (op0))
2500 op0 = convert_to_mode (mode, op0, 0);
2501
2502 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2503 if (maybe_legitimize_operands (icode, 0, 1, ops)
2504 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2505 return ops[0].value;
2506
2507 delete_insns_since (last);
2508 CALL_EXPR_ARG (exp, 0) = orig_arg;
2509 }
2510
2511 return NULL_RTX;
2512 }
2513
2514 /* Expand a call to the builtin sincos math function.
2515 Return NULL_RTX if a normal call should be emitted rather than expanding the
2516 function in-line. EXP is the expression that is a call to the builtin
2517 function. */
2518
2519 static rtx
2520 expand_builtin_sincos (tree exp)
2521 {
2522 rtx op0, op1, op2, target1, target2;
2523 machine_mode mode;
2524 tree arg, sinp, cosp;
2525 int result;
2526 location_t loc = EXPR_LOCATION (exp);
2527 tree alias_type, alias_off;
2528
2529 if (!validate_arglist (exp, REAL_TYPE,
2530 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2531 return NULL_RTX;
2532
2533 arg = CALL_EXPR_ARG (exp, 0);
2534 sinp = CALL_EXPR_ARG (exp, 1);
2535 cosp = CALL_EXPR_ARG (exp, 2);
2536
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2539
2540 /* Check if sincos insn is available, otherwise emit the call. */
2541 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2542 return NULL_RTX;
2543
2544 target1 = gen_reg_rtx (mode);
2545 target2 = gen_reg_rtx (mode);
2546
2547 op0 = expand_normal (arg);
2548 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2549 alias_off = build_int_cst (alias_type, 0);
2550 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 sinp, alias_off));
2552 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 cosp, alias_off));
2554
2555 /* Compute into target1 and target2.
2556 Set TARGET to wherever the result comes back. */
2557 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2558 gcc_assert (result);
2559
2560 /* Move target1 and target2 to the memory locations indicated
2561 by op1 and op2. */
2562 emit_move_insn (op1, target1);
2563 emit_move_insn (op2, target2);
2564
2565 return const0_rtx;
2566 }
2567
2568 /* Expand a call to the internal cexpi builtin to the sincos math function.
2569 EXP is the expression that is a call to the builtin function; if convenient,
2570 the result should be placed in TARGET. */
2571
2572 static rtx
2573 expand_builtin_cexpi (tree exp, rtx target)
2574 {
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg, type;
2577 machine_mode mode;
2578 rtx op0, op1, op2;
2579 location_t loc = EXPR_LOCATION (exp);
2580
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 return NULL_RTX;
2583
2584 arg = CALL_EXPR_ARG (exp, 0);
2585 type = TREE_TYPE (arg);
2586 mode = TYPE_MODE (TREE_TYPE (arg));
2587
2588 /* Try expanding via a sincos optab, fall back to emitting a libcall
2589 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2590 is only generated from sincos, cexp or if we have either of them. */
2591 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2592 {
2593 op1 = gen_reg_rtx (mode);
2594 op2 = gen_reg_rtx (mode);
2595
2596 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2597
2598 /* Compute into op1 and op2. */
2599 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2600 }
2601 else if (targetm.libc_has_function (function_sincos))
2602 {
2603 tree call, fn = NULL_TREE;
2604 tree top1, top2;
2605 rtx op1a, op2a;
2606
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2613 else
2614 gcc_unreachable ();
2615
2616 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2617 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op1a = copy_addr_to_reg (XEXP (op1, 0));
2619 op2a = copy_addr_to_reg (XEXP (op2, 0));
2620 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2621 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2622
2623 /* Make sure not to fold the sincos call again. */
2624 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2625 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2626 call, 3, arg, top1, top2));
2627 }
2628 else
2629 {
2630 tree call, fn = NULL_TREE, narg;
2631 tree ctype = build_complex_type (type);
2632
2633 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2639 else
2640 gcc_unreachable ();
2641
2642 /* If we don't have a decl for cexp create one. This is the
2643 friendliest fallback if the user calls __builtin_cexpi
2644 without full target C99 function support. */
2645 if (fn == NULL_TREE)
2646 {
2647 tree fntype;
2648 const char *name = NULL;
2649
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 name = "cexpf";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 name = "cexp";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 name = "cexpl";
2656
2657 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2658 fn = build_fn_decl (name, fntype);
2659 }
2660
2661 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2662 build_real (type, dconst0), arg);
2663
2664 /* Make sure not to fold the cexp call again. */
2665 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2666 return expand_expr (build_call_nary (ctype, call, 1, narg),
2667 target, VOIDmode, EXPAND_NORMAL);
2668 }
2669
2670 /* Now build the proper return type. */
2671 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2672 make_tree (TREE_TYPE (arg), op2),
2673 make_tree (TREE_TYPE (arg), op1)),
2674 target, VOIDmode, EXPAND_NORMAL);
2675 }
2676
2677 /* Conveniently construct a function call expression. FNDECL names the
2678 function to be called, N is the number of arguments, and the "..."
2679 parameters are the argument expressions. Unlike build_call_exr
2680 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2681
2682 static tree
2683 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2684 {
2685 va_list ap;
2686 tree fntype = TREE_TYPE (fndecl);
2687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2688
2689 va_start (ap, n);
2690 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2691 va_end (ap);
2692 SET_EXPR_LOCATION (fn, loc);
2693 return fn;
2694 }
2695
2696 /* Expand a call to one of the builtin rounding functions gcc defines
2697 as an extension (lfloor and lceil). As these are gcc extensions we
2698 do not need to worry about setting errno to EDOM.
2699 If expanding via optab fails, lower expression to (int)(floor(x)).
2700 EXP is the expression that is a call to the builtin function;
2701 if convenient, the result should be placed in TARGET. */
2702
2703 static rtx
2704 expand_builtin_int_roundingfn (tree exp, rtx target)
2705 {
2706 convert_optab builtin_optab;
2707 rtx op0, tmp;
2708 rtx_insn *insns;
2709 tree fndecl = get_callee_fndecl (exp);
2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
2712 machine_mode mode;
2713 tree arg;
2714
2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2716 gcc_unreachable ();
2717
2718 arg = CALL_EXPR_ARG (exp, 0);
2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
2722 CASE_FLT_FN (BUILT_IN_ICEIL):
2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2728
2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2735
2736 default:
2737 gcc_unreachable ();
2738 }
2739
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2742
2743 target = gen_reg_rtx (mode);
2744
2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
2747 side-effects more the once. */
2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2749
2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2751
2752 start_sequence ();
2753
2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2756 {
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn (insns);
2761 return target;
2762 }
2763
2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2767
2768 /* Fall back to floating point rounding optab. */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2770
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
2773 a call to the floor/ceil variants nevertheless. This should result
2774 in the best user experience for not full C99 targets. */
2775 if (fallback_fndecl == NULL_TREE)
2776 {
2777 tree fntype;
2778 const char *name = NULL;
2779
2780 switch (DECL_FUNCTION_CODE (fndecl))
2781 {
2782 case BUILT_IN_ICEIL:
2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
2787 case BUILT_IN_ICEILF:
2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
2792 case BUILT_IN_ICEILL:
2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
2797 case BUILT_IN_IFLOOR:
2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
2802 case BUILT_IN_IFLOORF:
2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
2807 case BUILT_IN_IFLOORL:
2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2814 }
2815
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2819 }
2820
2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2822
2823 tmp = expand_normal (exp);
2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2825
2826 /* Truncate the result of floating point optab to integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2830
2831 return target;
2832 }
2833
2834 /* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
2838 function; if convenient, the result should be placed in TARGET. */
2839
2840 static rtx
2841 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2842 {
2843 convert_optab builtin_optab;
2844 rtx op0;
2845 rtx_insn *insns;
2846 tree fndecl = get_callee_fndecl (exp);
2847 tree arg;
2848 machine_mode mode;
2849 enum built_in_function fallback_fn = BUILT_IN_NONE;
2850
2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 gcc_unreachable ();
2853
2854 arg = CALL_EXPR_ARG (exp, 0);
2855
2856 switch (DECL_FUNCTION_CODE (fndecl))
2857 {
2858 CASE_FLT_FN (BUILT_IN_IRINT):
2859 fallback_fn = BUILT_IN_LRINT;
2860 /* FALLTHRU */
2861 CASE_FLT_FN (BUILT_IN_LRINT):
2862 CASE_FLT_FN (BUILT_IN_LLRINT):
2863 builtin_optab = lrint_optab;
2864 break;
2865
2866 CASE_FLT_FN (BUILT_IN_IROUND):
2867 fallback_fn = BUILT_IN_LROUND;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LROUND):
2870 CASE_FLT_FN (BUILT_IN_LLROUND):
2871 builtin_optab = lround_optab;
2872 break;
2873
2874 default:
2875 gcc_unreachable ();
2876 }
2877
2878 /* There's no easy way to detect the case we need to set EDOM. */
2879 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2880 return NULL_RTX;
2881
2882 /* Make a suitable register to place result in. */
2883 mode = TYPE_MODE (TREE_TYPE (exp));
2884
2885 /* There's no easy way to detect the case we need to set EDOM. */
2886 if (!flag_errno_math)
2887 {
2888 rtx result = gen_reg_rtx (mode);
2889
2890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2891 need to expand the argument again. This way, we will not perform
2892 side-effects more the once. */
2893 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2894
2895 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2896
2897 start_sequence ();
2898
2899 if (expand_sfix_optab (result, op0, builtin_optab))
2900 {
2901 /* Output the entire sequence. */
2902 insns = get_insns ();
2903 end_sequence ();
2904 emit_insn (insns);
2905 return result;
2906 }
2907
2908 /* If we were unable to expand via the builtin, stop the sequence
2909 (without outputting the insns) and call to the library function
2910 with the stabilized argument list. */
2911 end_sequence ();
2912 }
2913
2914 if (fallback_fn != BUILT_IN_NONE)
2915 {
2916 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2917 targets, (int) round (x) should never be transformed into
2918 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2919 a call to lround in the hope that the target provides at least some
2920 C99 functions. This should result in the best user experience for
2921 not full C99 targets. */
2922 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2923 fallback_fn, 0);
2924
2925 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2926 fallback_fndecl, 1, arg);
2927
2928 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2929 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2930 return convert_to_mode (mode, target, 0);
2931 }
2932
2933 return expand_call (exp, target, target == const0_rtx);
2934 }
2935
2936 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2937 a normal call should be emitted rather than expanding the function
2938 in-line. EXP is the expression that is a call to the builtin
2939 function; if convenient, the result should be placed in TARGET. */
2940
2941 static rtx
2942 expand_builtin_powi (tree exp, rtx target)
2943 {
2944 tree arg0, arg1;
2945 rtx op0, op1;
2946 machine_mode mode;
2947 machine_mode mode2;
2948
2949 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
2951
2952 arg0 = CALL_EXPR_ARG (exp, 0);
2953 arg1 = CALL_EXPR_ARG (exp, 1);
2954 mode = TYPE_MODE (TREE_TYPE (exp));
2955
2956 /* Emit a libcall to libgcc. */
2957
2958 /* Mode of the 2nd argument must match that of an int. */
2959 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2960
2961 if (target == NULL_RTX)
2962 target = gen_reg_rtx (mode);
2963
2964 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2965 if (GET_MODE (op0) != mode)
2966 op0 = convert_to_mode (mode, op0, 0);
2967 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2968 if (GET_MODE (op1) != mode2)
2969 op1 = convert_to_mode (mode2, op1, 0);
2970
2971 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2972 target, LCT_CONST, mode, 2,
2973 op0, mode, op1, mode2);
2974
2975 return target;
2976 }
2977
2978 /* Expand expression EXP which is a call to the strlen builtin. Return
2979 NULL_RTX if we failed the caller should emit a normal call, otherwise
2980 try to get the result in TARGET, if convenient. */
2981
2982 static rtx
2983 expand_builtin_strlen (tree exp, rtx target,
2984 machine_mode target_mode)
2985 {
2986 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
2988 else
2989 {
2990 struct expand_operand ops[4];
2991 rtx pat;
2992 tree len;
2993 tree src = CALL_EXPR_ARG (exp, 0);
2994 rtx src_reg;
2995 rtx_insn *before_strlen;
2996 machine_mode insn_mode = target_mode;
2997 enum insn_code icode = CODE_FOR_nothing;
2998 unsigned int align;
2999
3000 /* If the length can be computed at compile-time, return it. */
3001 len = c_strlen (src, 0);
3002 if (len)
3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3004
3005 /* If the length can be computed at compile-time and is constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3012 {
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 }
3016
3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3018
3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
3021 return NULL_RTX;
3022
3023 /* Bail out if we can't compute strlen in the right mode. */
3024 while (insn_mode != VOIDmode)
3025 {
3026 icode = optab_handler (strlen_optab, insn_mode);
3027 if (icode != CODE_FOR_nothing)
3028 break;
3029
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3031 }
3032 if (insn_mode == VOIDmode)
3033 return NULL_RTX;
3034
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
3039
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen = get_last_insn ();
3043
3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
3049 return NULL_RTX;
3050
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3054 if (pat != src_reg)
3055 {
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060 #endif
3061 emit_move_insn (src_reg, pat);
3062 }
3063 pat = get_insns ();
3064 end_sequence ();
3065
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
3070
3071 /* Return the value in the proper mode for this function. */
3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
3074 else if (target != 0)
3075 convert_move (target, ops[0].value, 0);
3076 else
3077 target = convert_to_mode (target_mode, ops[0].value, 0);
3078
3079 return target;
3080 }
3081 }
3082
3083 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3084 bytes from constant string DATA + OFFSET and return it as target
3085 constant. */
3086
3087 static rtx
3088 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3089 machine_mode mode)
3090 {
3091 const char *str = (const char *) data;
3092
3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
3096
3097 return c_readstr (str + offset, mode);
3098 }
3099
3100 /* LEN specify length of the block of memcpy/memset operation.
3101 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3102 In some cases we can make very likely guess on max size, then we
3103 set it into PROBABLE_MAX_SIZE. */
3104
3105 static void
3106 determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
3110 {
3111 if (CONST_INT_P (len_rtx))
3112 {
3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3114 return;
3115 }
3116 else
3117 {
3118 wide_int min, max;
3119 enum value_range_type range_type = VR_UNDEFINED;
3120
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3131
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
3135 {
3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3137 *min_size = min.to_uhwi ();
3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3139 *probable_max_size = *max_size = max.to_uhwi ();
3140 }
3141 else if (range_type == VR_ANTI_RANGE)
3142 {
3143 /* Anti range 0...N lets us to determine minimal size to N+1. */
3144 if (min == 0)
3145 {
3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
3148 }
3149 /* Code like
3150
3151 int n;
3152 if (n < 100)
3153 memcpy (a, b, n)
3154
3155 Produce anti range allowing negative values of N. We still
3156 can use the information and make a guess that N is not negative.
3157 */
3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
3160 }
3161 }
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
3165 }
3166
3167 /* Helper function to do the actual work for expand_builtin_memcpy. */
3168
3169 static rtx
3170 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3171 {
3172 const char *src_str;
3173 unsigned int src_align = get_pointer_alignment (src);
3174 unsigned int dest_align = get_pointer_alignment (dest);
3175 rtx dest_mem, src_mem, dest_addr, len_rtx;
3176 HOST_WIDE_INT expected_size = -1;
3177 unsigned int expected_align = 0;
3178 unsigned HOST_WIDE_INT min_size;
3179 unsigned HOST_WIDE_INT max_size;
3180 unsigned HOST_WIDE_INT probable_max_size;
3181
3182 /* If DEST is not a pointer type, call the normal function. */
3183 if (dest_align == 0)
3184 return NULL_RTX;
3185
3186 /* If either SRC is not a pointer type, don't do this
3187 operation in-line. */
3188 if (src_align == 0)
3189 return NULL_RTX;
3190
3191 if (currently_expanding_gimple_stmt)
3192 stringop_block_profile (currently_expanding_gimple_stmt,
3193 &expected_align, &expected_size);
3194
3195 if (expected_align < dest_align)
3196 expected_align = dest_align;
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 len_rtx = expand_normal (len);
3200 determine_block_size (len, len_rtx, &min_size, &max_size,
3201 &probable_max_size);
3202 src_str = c_getstr (src);
3203
3204 /* If SRC is a string constant and block move would be done
3205 by pieces, we can avoid loading the string from memory
3206 and only stored the computed constants. */
3207 if (src_str
3208 && CONST_INT_P (len_rtx)
3209 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3210 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3211 CONST_CAST (char *, src_str),
3212 dest_align, false))
3213 {
3214 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3215 builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false, 0);
3218 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3219 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3220 return dest_mem;
3221 }
3222
3223 src_mem = get_memory_rtx (src, len);
3224 set_mem_align (src_mem, src_align);
3225
3226 /* Copy word part most expediently. */
3227 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3228 CALL_EXPR_TAILCALL (exp)
3229 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3230 expected_align, expected_size,
3231 min_size, max_size, probable_max_size);
3232
3233 if (dest_addr == 0)
3234 {
3235 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3236 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3237 }
3238
3239 return dest_addr;
3240 }
3241
3242 /* Expand a call EXP to the memcpy builtin.
3243 Return NULL_RTX if we failed, the caller should emit a normal call,
3244 otherwise try to get the result in TARGET, if convenient (and in
3245 mode MODE if that's convenient). */
3246
3247 static rtx
3248 expand_builtin_memcpy (tree exp, rtx target)
3249 {
3250 if (!validate_arglist (exp,
3251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
3253 else
3254 {
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 tree len = CALL_EXPR_ARG (exp, 2);
3258 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3259 }
3260 }
3261
3262 /* Expand an instrumented call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed, the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 2);
3279 tree len = CALL_EXPR_ARG (exp, 4);
3280 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3281
3282 /* Return src bounds with the result. */
3283 if (res)
3284 {
3285 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3286 expand_normal (CALL_EXPR_ARG (exp, 1)));
3287 res = chkp_join_splitted_slot (res, bnd);
3288 }
3289 return res;
3290 }
3291 }
3292
3293 /* Expand a call EXP to the mempcpy builtin.
3294 Return NULL_RTX if we failed; the caller should emit a normal call,
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). If ENDP is 0 return the
3297 destination pointer, if ENDP is 1 return the end pointer ala
3298 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3299 stpcpy. */
3300
3301 static rtx
3302 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3303 {
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3308 {
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3312 return expand_builtin_mempcpy_args (dest, src, len,
3313 target, mode, /*endp=*/ 1,
3314 exp);
3315 }
3316 }
3317
3318 /* Expand an instrumented call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed, the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). */
3322
3323 static rtx
3324 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3325 {
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3332 {
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 2);
3335 tree len = CALL_EXPR_ARG (exp, 4);
3336 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3337 mode, 1, exp);
3338
3339 /* Return src bounds with the result. */
3340 if (res)
3341 {
3342 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3343 expand_normal (CALL_EXPR_ARG (exp, 1)));
3344 res = chkp_join_splitted_slot (res, bnd);
3345 }
3346 return res;
3347 }
3348 }
3349
3350 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
3353 The other arguments and return value are the same as for
3354 expand_builtin_mempcpy. */
3355
3356 static rtx
3357 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3358 rtx target, machine_mode mode, int endp,
3359 tree orig_exp)
3360 {
3361 tree fndecl = get_callee_fndecl (orig_exp);
3362
3363 /* If return value is ignored, transform mempcpy into memcpy. */
3364 if (target == const0_rtx
3365 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3366 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3367 {
3368 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3369 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3370 dest, src, len);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 }
3373 else if (target == const0_rtx
3374 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3375 {
3376 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3377 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3378 dest, src, len);
3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
3380 }
3381 else
3382 {
3383 const char *src_str;
3384 unsigned int src_align = get_pointer_alignment (src);
3385 unsigned int dest_align = get_pointer_alignment (dest);
3386 rtx dest_mem, src_mem, len_rtx;
3387
3388 /* If either SRC or DEST is not a pointer type, don't do this
3389 operation in-line. */
3390 if (dest_align == 0 || src_align == 0)
3391 return NULL_RTX;
3392
3393 /* If LEN is not constant, call the normal function. */
3394 if (! tree_fits_uhwi_p (len))
3395 return NULL_RTX;
3396
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3399
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402 and only stored the computed constants. */
3403 if (src_str
3404 && CONST_INT_P (len_rtx)
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3409 {
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3419 }
3420
3421 if (CONST_INT_P (len_rtx)
3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3424 {
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 src_mem = get_memory_rtx (src, len);
3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3434 }
3435
3436 return NULL_RTX;
3437 }
3438 }
3439
3440 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3441 we failed, the caller should emit a normal call, otherwise try to
3442 get the result in TARGET, if convenient. If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3446
3447 static rtx
3448 expand_movstr (tree dest, tree src, rtx target, int endp)
3449 {
3450 struct expand_operand ops[3];
3451 rtx dest_mem;
3452 rtx src_mem;
3453
3454 if (!targetm.have_movstr ())
3455 return NULL_RTX;
3456
3457 dest_mem = get_memory_rtx (dest, NULL);
3458 src_mem = get_memory_rtx (src, NULL);
3459 if (!endp)
3460 {
3461 target = force_reg (Pmode, XEXP (dest_mem, 0));
3462 dest_mem = replace_equiv_address (dest_mem, target);
3463 }
3464
3465 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3466 create_fixed_operand (&ops[1], dest_mem);
3467 create_fixed_operand (&ops[2], src_mem);
3468 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3469 return NULL_RTX;
3470
3471 if (endp && target != const0_rtx)
3472 {
3473 target = ops[0].value;
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3476 adjust it. */
3477 if (endp == 1)
3478 {
3479 rtx tem = plus_constant (GET_MODE (target),
3480 gen_lowpart (GET_MODE (target), target), 1);
3481 emit_move_insn (target, force_operand (tem, NULL_RTX));
3482 }
3483 }
3484 return target;
3485 }
3486
3487 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3488 NULL_RTX if we failed the caller should emit a normal call, otherwise
3489 try to get the result in TARGET, if convenient (and in mode MODE if that's
3490 convenient). */
3491
3492 static rtx
3493 expand_builtin_strcpy (tree exp, rtx target)
3494 {
3495 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 {
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 1);
3499 return expand_builtin_strcpy_args (dest, src, target);
3500 }
3501 return NULL_RTX;
3502 }
3503
3504 /* Helper function to do the actual work for expand_builtin_strcpy. The
3505 arguments to the builtin_strcpy call DEST and SRC are broken out
3506 so that this can also be called without constructing an actual CALL_EXPR.
3507 The other arguments and return value are the same as for
3508 expand_builtin_strcpy. */
3509
3510 static rtx
3511 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3512 {
3513 return expand_movstr (dest, src, target, /*endp=*/0);
3514 }
3515
3516 /* Expand a call EXP to the stpcpy builtin.
3517 Return NULL_RTX if we failed the caller should emit a normal call,
3518 otherwise try to get the result in TARGET, if convenient (and in
3519 mode MODE if that's convenient). */
3520
3521 static rtx
3522 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3523 {
3524 tree dst, src;
3525 location_t loc = EXPR_LOCATION (exp);
3526
3527 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529
3530 dst = CALL_EXPR_ARG (exp, 0);
3531 src = CALL_EXPR_ARG (exp, 1);
3532
3533 /* If return value is ignored, transform stpcpy into strcpy. */
3534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3535 {
3536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3539 }
3540 else
3541 {
3542 tree len, lenp1;
3543 rtx ret;
3544
3545 /* Ensure we get an actual string whose length can be evaluated at
3546 compile-time, not an expression containing a string. This is
3547 because the latter will potentially produce pessimized code
3548 when used to produce the return value. */
3549 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3550 return expand_movstr (dst, src, target, /*endp=*/2);
3551
3552 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3553 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3554 target, mode, /*endp=*/2,
3555 exp);
3556
3557 if (ret)
3558 return ret;
3559
3560 if (TREE_CODE (len) == INTEGER_CST)
3561 {
3562 rtx len_rtx = expand_normal (len);
3563
3564 if (CONST_INT_P (len_rtx))
3565 {
3566 ret = expand_builtin_strcpy_args (dst, src, target);
3567
3568 if (ret)
3569 {
3570 if (! target)
3571 {
3572 if (mode != VOIDmode)
3573 target = gen_reg_rtx (mode);
3574 else
3575 target = gen_reg_rtx (GET_MODE (ret));
3576 }
3577 if (GET_MODE (target) != GET_MODE (ret))
3578 ret = gen_lowpart (GET_MODE (target), ret);
3579
3580 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3581 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3582 gcc_assert (ret);
3583
3584 return target;
3585 }
3586 }
3587 }
3588
3589 return expand_movstr (dst, src, target, /*endp=*/2);
3590 }
3591 }
3592
3593 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3594 bytes from constant string DATA + OFFSET and return it as target
3595 constant. */
3596
3597 rtx
3598 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3599 machine_mode mode)
3600 {
3601 const char *str = (const char *) data;
3602
3603 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return const0_rtx;
3605
3606 return c_readstr (str + offset, mode);
3607 }
3608
3609 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3610 NULL_RTX if we failed the caller should emit a normal call. */
3611
3612 static rtx
3613 expand_builtin_strncpy (tree exp, rtx target)
3614 {
3615 location_t loc = EXPR_LOCATION (exp);
3616
3617 if (validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 {
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3623 tree slen = c_strlen (src, 1);
3624
3625 /* We must be passed a constant len and src parameter. */
3626 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3627 return NULL_RTX;
3628
3629 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3630
3631 /* We're required to pad with trailing zeros if the requested
3632 len is greater than strlen(s2)+1. In that case try to
3633 use store_by_pieces, if it fails, punt. */
3634 if (tree_int_cst_lt (slen, len))
3635 {
3636 unsigned int dest_align = get_pointer_alignment (dest);
3637 const char *p = c_getstr (src);
3638 rtx dest_mem;
3639
3640 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3641 || !can_store_by_pieces (tree_to_uhwi (len),
3642 builtin_strncpy_read_str,
3643 CONST_CAST (char *, p),
3644 dest_align, false))
3645 return NULL_RTX;
3646
3647 dest_mem = get_memory_rtx (dest, len);
3648 store_by_pieces (dest_mem, tree_to_uhwi (len),
3649 builtin_strncpy_read_str,
3650 CONST_CAST (char *, p), dest_align, false, 0);
3651 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3652 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3653 return dest_mem;
3654 }
3655 }
3656 return NULL_RTX;
3657 }
3658
3659 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3660 bytes from constant string DATA + OFFSET and return it as target
3661 constant. */
3662
3663 rtx
3664 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3665 machine_mode mode)
3666 {
3667 const char *c = (const char *) data;
3668 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3669
3670 memset (p, *c, GET_MODE_SIZE (mode));
3671
3672 return c_readstr (p, mode);
3673 }
3674
3675 /* Callback routine for store_by_pieces. Return the RTL of a register
3676 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3677 char value given in the RTL register data. For example, if mode is
3678 4 bytes wide, return the RTL for 0x01010101*data. */
3679
3680 static rtx
3681 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3682 machine_mode mode)
3683 {
3684 rtx target, coeff;
3685 size_t size;
3686 char *p;
3687
3688 size = GET_MODE_SIZE (mode);
3689 if (size == 1)
3690 return (rtx) data;
3691
3692 p = XALLOCAVEC (char, size);
3693 memset (p, 1, size);
3694 coeff = c_readstr (p, mode);
3695
3696 target = convert_to_mode (mode, (rtx) data, 1);
3697 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3698 return force_reg (mode, target);
3699 }
3700
3701 /* Expand expression EXP, which is a call to the memset builtin. Return
3702 NULL_RTX if we failed the caller should emit a normal call, otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3705
3706 static rtx
3707 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3708 {
3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712 else
3713 {
3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3718 }
3719 }
3720
3721 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3722 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3725
3726 static rtx
3727 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3728 {
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3731 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3734 {
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 2);
3737 tree len = CALL_EXPR_ARG (exp, 3);
3738 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3739
3740 /* Return src bounds with the result. */
3741 if (res)
3742 {
3743 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3744 expand_normal (CALL_EXPR_ARG (exp, 1)));
3745 res = chkp_join_splitted_slot (res, bnd);
3746 }
3747 return res;
3748 }
3749 }
3750
3751 /* Helper function to do the actual work for expand_builtin_memset. The
3752 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3753 so that this can also be called without constructing an actual CALL_EXPR.
3754 The other arguments and return value are the same as for
3755 expand_builtin_memset. */
3756
3757 static rtx
3758 expand_builtin_memset_args (tree dest, tree val, tree len,
3759 rtx target, machine_mode mode, tree orig_exp)
3760 {
3761 tree fndecl, fn;
3762 enum built_in_function fcode;
3763 machine_mode val_mode;
3764 char c;
3765 unsigned int dest_align;
3766 rtx dest_mem, dest_addr, len_rtx;
3767 HOST_WIDE_INT expected_size = -1;
3768 unsigned int expected_align = 0;
3769 unsigned HOST_WIDE_INT min_size;
3770 unsigned HOST_WIDE_INT max_size;
3771 unsigned HOST_WIDE_INT probable_max_size;
3772
3773 dest_align = get_pointer_alignment (dest);
3774
3775 /* If DEST is not a pointer type, don't do this operation in-line. */
3776 if (dest_align == 0)
3777 return NULL_RTX;
3778
3779 if (currently_expanding_gimple_stmt)
3780 stringop_block_profile (currently_expanding_gimple_stmt,
3781 &expected_align, &expected_size);
3782
3783 if (expected_align < dest_align)
3784 expected_align = dest_align;
3785
3786 /* If the LEN parameter is zero, return DEST. */
3787 if (integer_zerop (len))
3788 {
3789 /* Evaluate and ignore VAL in case it has side-effects. */
3790 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3791 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3792 }
3793
3794 /* Stabilize the arguments in case we fail. */
3795 dest = builtin_save_expr (dest);
3796 val = builtin_save_expr (val);
3797 len = builtin_save_expr (len);
3798
3799 len_rtx = expand_normal (len);
3800 determine_block_size (len, len_rtx, &min_size, &max_size,
3801 &probable_max_size);
3802 dest_mem = get_memory_rtx (dest, len);
3803 val_mode = TYPE_MODE (unsigned_char_type_node);
3804
3805 if (TREE_CODE (val) != INTEGER_CST)
3806 {
3807 rtx val_rtx;
3808
3809 val_rtx = expand_normal (val);
3810 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3811
3812 /* Assume that we can memset by pieces if we can store
3813 * the coefficients by pieces (in the required modes).
3814 * We can't pass builtin_memset_gen_str as that emits RTL. */
3815 c = 1;
3816 if (tree_fits_uhwi_p (len)
3817 && can_store_by_pieces (tree_to_uhwi (len),
3818 builtin_memset_read_str, &c, dest_align,
3819 true))
3820 {
3821 val_rtx = force_reg (val_mode, val_rtx);
3822 store_by_pieces (dest_mem, tree_to_uhwi (len),
3823 builtin_memset_gen_str, val_rtx, dest_align,
3824 true, 0);
3825 }
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3827 dest_align, expected_align,
3828 expected_size, min_size, max_size,
3829 probable_max_size))
3830 goto do_libcall;
3831
3832 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3833 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3834 return dest_mem;
3835 }
3836
3837 if (target_char_cast (val, &c))
3838 goto do_libcall;
3839
3840 if (c)
3841 {
3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
3844 builtin_memset_read_str, &c, dest_align,
3845 true))
3846 store_by_pieces (dest_mem, tree_to_uhwi (len),
3847 builtin_memset_read_str, &c, dest_align, true, 0);
3848 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3849 gen_int_mode (c, val_mode),
3850 dest_align, expected_align,
3851 expected_size, min_size, max_size,
3852 probable_max_size))
3853 goto do_libcall;
3854
3855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3857 return dest_mem;
3858 }
3859
3860 set_mem_align (dest_mem, dest_align);
3861 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3862 CALL_EXPR_TAILCALL (orig_exp)
3863 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3864 expected_align, expected_size,
3865 min_size, max_size,
3866 probable_max_size);
3867
3868 if (dest_addr == 0)
3869 {
3870 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3872 }
3873
3874 return dest_addr;
3875
3876 do_libcall:
3877 fndecl = get_callee_fndecl (orig_exp);
3878 fcode = DECL_FUNCTION_CODE (fndecl);
3879 if (fcode == BUILT_IN_MEMSET
3880 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3881 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3882 dest, val, len);
3883 else if (fcode == BUILT_IN_BZERO)
3884 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3885 dest, len);
3886 else
3887 gcc_unreachable ();
3888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3890 return expand_call (fn, target, target == const0_rtx);
3891 }
3892
3893 /* Expand expression EXP, which is a call to the bzero builtin. Return
3894 NULL_RTX if we failed the caller should emit a normal call. */
3895
3896 static rtx
3897 expand_builtin_bzero (tree exp)
3898 {
3899 tree dest, size;
3900 location_t loc = EXPR_LOCATION (exp);
3901
3902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3903 return NULL_RTX;
3904
3905 dest = CALL_EXPR_ARG (exp, 0);
3906 size = CALL_EXPR_ARG (exp, 1);
3907
3908 /* New argument list transforming bzero(ptr x, int y) to
3909 memset(ptr x, int 0, size_t y). This is done this way
3910 so that if it isn't expanded inline, we fallback to
3911 calling bzero instead of memset. */
3912
3913 return expand_builtin_memset_args (dest, integer_zero_node,
3914 fold_convert_loc (loc,
3915 size_type_node, size),
3916 const0_rtx, VOIDmode, exp);
3917 }
3918
3919 /* Try to expand cmpstr operation ICODE with the given operands.
3920 Return the result rtx on success, otherwise return null. */
3921
3922 static rtx
3923 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3924 HOST_WIDE_INT align)
3925 {
3926 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3927
3928 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3929 target = NULL_RTX;
3930
3931 struct expand_operand ops[4];
3932 create_output_operand (&ops[0], target, insn_mode);
3933 create_fixed_operand (&ops[1], arg1_rtx);
3934 create_fixed_operand (&ops[2], arg2_rtx);
3935 create_integer_operand (&ops[3], align);
3936 if (maybe_expand_insn (icode, 4, ops))
3937 return ops[0].value;
3938 return NULL_RTX;
3939 }
3940
3941 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3942 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3943 otherwise return null. */
3944
3945 static rtx
3946 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3947 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3948 HOST_WIDE_INT align)
3949 {
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3951
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3954
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3965 }
3966
3967 /* Expand expression EXP, which is a call to the memcmp built-in function.
3968 Return NULL_RTX if we failed and the caller should emit a normal call,
3969 otherwise try to get the result in TARGET, if convenient. */
3970
3971 static rtx
3972 expand_builtin_memcmp (tree exp, rtx target)
3973 {
3974 if (!validate_arglist (exp,
3975 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3976 return NULL_RTX;
3977
3978 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3979 implementing memcmp because it will stop if it encounters two
3980 zero bytes. */
3981 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3982 if (icode == CODE_FOR_nothing)
3983 return NULL_RTX;
3984
3985 tree arg1 = CALL_EXPR_ARG (exp, 0);
3986 tree arg2 = CALL_EXPR_ARG (exp, 1);
3987 tree len = CALL_EXPR_ARG (exp, 2);
3988
3989 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3990 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3991
3992 /* If we don't have POINTER_TYPE, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
3995
3996 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3997 location_t loc = EXPR_LOCATION (exp);
3998 rtx arg1_rtx = get_memory_rtx (arg1, len);
3999 rtx arg2_rtx = get_memory_rtx (arg2, len);
4000 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4001
4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4004 {
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4007 }
4008
4009 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4010 TREE_TYPE (len), arg3_rtx,
4011 MIN (arg1_align, arg2_align));
4012 if (result)
4013 {
4014 /* Return the value in the proper mode for this function. */
4015 if (GET_MODE (result) == mode)
4016 return result;
4017
4018 if (target != 0)
4019 {
4020 convert_move (target, result, 0);
4021 return target;
4022 }
4023
4024 return convert_to_mode (mode, result, 0);
4025 }
4026
4027 result = target;
4028 if (! (result != 0
4029 && REG_P (result) && GET_MODE (result) == mode
4030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4031 result = gen_reg_rtx (mode);
4032
4033 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4034 TYPE_MODE (integer_type_node), 3,
4035 XEXP (arg1_rtx, 0), Pmode,
4036 XEXP (arg2_rtx, 0), Pmode,
4037 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4038 TYPE_UNSIGNED (sizetype)),
4039 TYPE_MODE (sizetype));
4040 return result;
4041 }
4042
4043 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4044 if we failed the caller should emit a normal call, otherwise try to get
4045 the result in TARGET, if convenient. */
4046
4047 static rtx
4048 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4049 {
4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052
4053 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4054 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4055 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4056 {
4057 rtx arg1_rtx, arg2_rtx;
4058 tree fndecl, fn;
4059 tree arg1 = CALL_EXPR_ARG (exp, 0);
4060 tree arg2 = CALL_EXPR_ARG (exp, 1);
4061 rtx result = NULL_RTX;
4062
4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4065
4066 /* If we don't have POINTER_TYPE, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
4068 return NULL_RTX;
4069
4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4073
4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
4076
4077 /* Try to call cmpstrsi. */
4078 if (cmpstr_icode != CODE_FOR_nothing)
4079 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4080 MIN (arg1_align, arg2_align));
4081
4082 /* Try to determine at least one length and call cmpstrnsi. */
4083 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4084 {
4085 tree len;
4086 rtx arg3_rtx;
4087
4088 tree len1 = c_strlen (arg1, 1);
4089 tree len2 = c_strlen (arg2, 1);
4090
4091 if (len1)
4092 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4093 if (len2)
4094 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4095
4096 /* If we don't have a constant length for the first, use the length
4097 of the second, if we know it. We don't require a constant for
4098 this case; some cost analysis could be done if both are available
4099 but neither is constant. For now, assume they're equally cheap,
4100 unless one has side effects. If both strings have constant lengths,
4101 use the smaller. */
4102
4103 if (!len1)
4104 len = len2;
4105 else if (!len2)
4106 len = len1;
4107 else if (TREE_SIDE_EFFECTS (len1))
4108 len = len2;
4109 else if (TREE_SIDE_EFFECTS (len2))
4110 len = len1;
4111 else if (TREE_CODE (len1) != INTEGER_CST)
4112 len = len2;
4113 else if (TREE_CODE (len2) != INTEGER_CST)
4114 len = len1;
4115 else if (tree_int_cst_lt (len1, len2))
4116 len = len1;
4117 else
4118 len = len2;
4119
4120 /* If both arguments have side effects, we cannot optimize. */
4121 if (len && !TREE_SIDE_EFFECTS (len))
4122 {
4123 arg3_rtx = expand_normal (len);
4124 result = expand_cmpstrn_or_cmpmem
4125 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4126 arg3_rtx, MIN (arg1_align, arg2_align));
4127 }
4128 }
4129
4130 if (result)
4131 {
4132 /* Return the value in the proper mode for this function. */
4133 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4134 if (GET_MODE (result) == mode)
4135 return result;
4136 if (target == 0)
4137 return convert_to_mode (mode, result, 0);
4138 convert_move (target, result, 0);
4139 return target;
4140 }
4141
4142 /* Expand the library call ourselves using a stabilized argument
4143 list to avoid re-evaluating the function's arguments twice. */
4144 fndecl = get_callee_fndecl (exp);
4145 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4148 return expand_call (fn, target, target == const0_rtx);
4149 }
4150 return NULL_RTX;
4151 }
4152
4153 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4154 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4155 the result in TARGET, if convenient. */
4156
4157 static rtx
4158 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4159 ATTRIBUTE_UNUSED machine_mode mode)
4160 {
4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4162
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
4166
4167 /* If c_strlen can determine an expression for one of the string
4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
4169 using length MIN(strlen(string)+1, arg3). */
4170 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4171 if (cmpstrn_icode != CODE_FOR_nothing)
4172 {
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4175 rtx result;
4176 tree fndecl, fn;
4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
4180
4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4183
4184 len1 = c_strlen (arg1, 1);
4185 len2 = c_strlen (arg2, 1);
4186
4187 if (len1)
4188 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4189 if (len2)
4190 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4191
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4197 use the smaller. */
4198
4199 if (!len1)
4200 len = len2;
4201 else if (!len2)
4202 len = len1;
4203 else if (TREE_SIDE_EFFECTS (len1))
4204 len = len2;
4205 else if (TREE_SIDE_EFFECTS (len2))
4206 len = len1;
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4208 len = len2;
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4210 len = len1;
4211 else if (tree_int_cst_lt (len1, len2))
4212 len = len1;
4213 else
4214 len = len2;
4215
4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
4218 return NULL_RTX;
4219
4220 /* The actual new length parameter is MIN(len,arg3). */
4221 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4222 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4223
4224 /* If we don't have POINTER_TYPE, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
4226 return NULL_RTX;
4227
4228 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4231 len = builtin_save_expr (len);
4232
4233 arg1_rtx = get_memory_rtx (arg1, len);
4234 arg2_rtx = get_memory_rtx (arg2, len);
4235 arg3_rtx = expand_normal (len);
4236 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4237 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4238 MIN (arg1_align, arg2_align));
4239 if (result)
4240 {
4241 /* Return the value in the proper mode for this function. */
4242 mode = TYPE_MODE (TREE_TYPE (exp));
4243 if (GET_MODE (result) == mode)
4244 return result;
4245 if (target == 0)
4246 return convert_to_mode (mode, result, 0);
4247 convert_move (target, result, 0);
4248 return target;
4249 }
4250
4251 /* Expand the library call ourselves using a stabilized argument
4252 list to avoid re-evaluating the function's arguments twice. */
4253 fndecl = get_callee_fndecl (exp);
4254 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4255 arg1, arg2, len);
4256 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4257 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4258 return expand_call (fn, target, target == const0_rtx);
4259 }
4260 return NULL_RTX;
4261 }
4262
4263 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4264 if that's convenient. */
4265
4266 rtx
4267 expand_builtin_saveregs (void)
4268 {
4269 rtx val;
4270 rtx_insn *seq;
4271
4272 /* Don't do __builtin_saveregs more than once in a function.
4273 Save the result of the first call and reuse it. */
4274 if (saveregs_value != 0)
4275 return saveregs_value;
4276
4277 /* When this function is called, it means that registers must be
4278 saved on entry to this function. So we migrate the call to the
4279 first insn of this function. */
4280
4281 start_sequence ();
4282
4283 /* Do whatever the machine needs done in this case. */
4284 val = targetm.calls.expand_builtin_saveregs ();
4285
4286 seq = get_insns ();
4287 end_sequence ();
4288
4289 saveregs_value = val;
4290
4291 /* Put the insns after the NOTE that starts the function. If this
4292 is inside a start_sequence, make the outer-level insn chain current, so
4293 the code is placed at the start of the function. */
4294 push_topmost_sequence ();
4295 emit_insn_after (seq, entry_of_function ());
4296 pop_topmost_sequence ();
4297
4298 return val;
4299 }
4300
4301 /* Expand a call to __builtin_next_arg. */
4302
4303 static rtx
4304 expand_builtin_next_arg (void)
4305 {
4306 /* Checking arguments is already done in fold_builtin_next_arg
4307 that must be called before this function. */
4308 return expand_binop (ptr_mode, add_optab,
4309 crtl->args.internal_arg_pointer,
4310 crtl->args.arg_offset_rtx,
4311 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4312 }
4313
4314 /* Make it easier for the backends by protecting the valist argument
4315 from multiple evaluations. */
4316
4317 static tree
4318 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4319 {
4320 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4321
4322 /* The current way of determining the type of valist is completely
4323 bogus. We should have the information on the va builtin instead. */
4324 if (!vatype)
4325 vatype = targetm.fn_abi_va_list (cfun->decl);
4326
4327 if (TREE_CODE (vatype) == ARRAY_TYPE)
4328 {
4329 if (TREE_SIDE_EFFECTS (valist))
4330 valist = save_expr (valist);
4331
4332 /* For this case, the backends will be expecting a pointer to
4333 vatype, but it's possible we've actually been given an array
4334 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4335 So fix it. */
4336 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4337 {
4338 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4339 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4340 }
4341 }
4342 else
4343 {
4344 tree pt = build_pointer_type (vatype);
4345
4346 if (! needs_lvalue)
4347 {
4348 if (! TREE_SIDE_EFFECTS (valist))
4349 return valist;
4350
4351 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4352 TREE_SIDE_EFFECTS (valist) = 1;
4353 }
4354
4355 if (TREE_SIDE_EFFECTS (valist))
4356 valist = save_expr (valist);
4357 valist = fold_build2_loc (loc, MEM_REF,
4358 vatype, valist, build_int_cst (pt, 0));
4359 }
4360
4361 return valist;
4362 }
4363
4364 /* The "standard" definition of va_list is void*. */
4365
4366 tree
4367 std_build_builtin_va_list (void)
4368 {
4369 return ptr_type_node;
4370 }
4371
4372 /* The "standard" abi va_list is va_list_type_node. */
4373
4374 tree
4375 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4376 {
4377 return va_list_type_node;
4378 }
4379
4380 /* The "standard" type of va_list is va_list_type_node. */
4381
4382 tree
4383 std_canonical_va_list_type (tree type)
4384 {
4385 tree wtype, htype;
4386
4387 if (INDIRECT_REF_P (type))
4388 type = TREE_TYPE (type);
4389 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4390 type = TREE_TYPE (type);
4391 wtype = va_list_type_node;
4392 htype = type;
4393 /* Treat structure va_list types. */
4394 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4395 htype = TREE_TYPE (htype);
4396 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4397 {
4398 /* If va_list is an array type, the argument may have decayed
4399 to a pointer type, e.g. by being passed to another function.
4400 In that case, unwrap both types so that we can compare the
4401 underlying records. */
4402 if (TREE_CODE (htype) == ARRAY_TYPE
4403 || POINTER_TYPE_P (htype))
4404 {
4405 wtype = TREE_TYPE (wtype);
4406 htype = TREE_TYPE (htype);
4407 }
4408 }
4409 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4410 return va_list_type_node;
4411
4412 return NULL_TREE;
4413 }
4414
4415 /* The "standard" implementation of va_start: just assign `nextarg' to
4416 the variable. */
4417
4418 void
4419 std_expand_builtin_va_start (tree valist, rtx nextarg)
4420 {
4421 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 convert_move (va_r, nextarg, 0);
4423
4424 /* We do not have any valid bounds for the pointer, so
4425 just store zero bounds for it. */
4426 if (chkp_function_instrumented_p (current_function_decl))
4427 chkp_expand_bounds_reset_for_mem (valist,
4428 make_tree (TREE_TYPE (valist),
4429 nextarg));
4430 }
4431
4432 /* Expand EXP, a call to __builtin_va_start. */
4433
4434 static rtx
4435 expand_builtin_va_start (tree exp)
4436 {
4437 rtx nextarg;
4438 tree valist;
4439 location_t loc = EXPR_LOCATION (exp);
4440
4441 if (call_expr_nargs (exp) < 2)
4442 {
4443 error_at (loc, "too few arguments to function %<va_start%>");
4444 return const0_rtx;
4445 }
4446
4447 if (fold_builtin_next_arg (exp, true))
4448 return const0_rtx;
4449
4450 nextarg = expand_builtin_next_arg ();
4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4452
4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4455 else
4456 std_expand_builtin_va_start (valist, nextarg);
4457
4458 return const0_rtx;
4459 }
4460
4461 /* Expand EXP, a call to __builtin_va_end. */
4462
4463 static rtx
4464 expand_builtin_va_end (tree exp)
4465 {
4466 tree valist = CALL_EXPR_ARG (exp, 0);
4467
4468 /* Evaluate for side effects, if needed. I hate macros that don't
4469 do that. */
4470 if (TREE_SIDE_EFFECTS (valist))
4471 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4472
4473 return const0_rtx;
4474 }
4475
4476 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4477 builtin rather than just as an assignment in stdarg.h because of the
4478 nastiness of array-type va_list types. */
4479
4480 static rtx
4481 expand_builtin_va_copy (tree exp)
4482 {
4483 tree dst, src, t;
4484 location_t loc = EXPR_LOCATION (exp);
4485
4486 dst = CALL_EXPR_ARG (exp, 0);
4487 src = CALL_EXPR_ARG (exp, 1);
4488
4489 dst = stabilize_va_list_loc (loc, dst, 1);
4490 src = stabilize_va_list_loc (loc, src, 0);
4491
4492 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4493
4494 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4495 {
4496 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4497 TREE_SIDE_EFFECTS (t) = 1;
4498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499 }
4500 else
4501 {
4502 rtx dstb, srcb, size;
4503
4504 /* Evaluate to pointers. */
4505 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4506 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4507 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4508 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4509
4510 dstb = convert_memory_address (Pmode, dstb);
4511 srcb = convert_memory_address (Pmode, srcb);
4512
4513 /* "Dereference" to BLKmode memories. */
4514 dstb = gen_rtx_MEM (BLKmode, dstb);
4515 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4516 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4517 srcb = gen_rtx_MEM (BLKmode, srcb);
4518 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4519 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4520
4521 /* Copy. */
4522 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4523 }
4524
4525 return const0_rtx;
4526 }
4527
4528 /* Expand a call to one of the builtin functions __builtin_frame_address or
4529 __builtin_return_address. */
4530
4531 static rtx
4532 expand_builtin_frame_address (tree fndecl, tree exp)
4533 {
4534 /* The argument must be a nonnegative integer constant.
4535 It counts the number of frames to scan up the stack.
4536 The value is either the frame pointer value or the return
4537 address saved in that frame. */
4538 if (call_expr_nargs (exp) == 0)
4539 /* Warning about missing arg was already issued. */
4540 return const0_rtx;
4541 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4542 {
4543 error ("invalid argument to %qD", fndecl);
4544 return const0_rtx;
4545 }
4546 else
4547 {
4548 /* Number of frames to scan up the stack. */
4549 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4550
4551 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4552
4553 /* Some ports cannot access arbitrary stack frames. */
4554 if (tem == NULL)
4555 {
4556 warning (0, "unsupported argument to %qD", fndecl);
4557 return const0_rtx;
4558 }
4559
4560 if (count)
4561 {
4562 /* Warn since no effort is made to ensure that any frame
4563 beyond the current one exists or can be safely reached. */
4564 warning (OPT_Wframe_address, "calling %qD with "
4565 "a nonzero argument is unsafe", fndecl);
4566 }
4567
4568 /* For __builtin_frame_address, return what we've got. */
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 return tem;
4571
4572 if (!REG_P (tem)
4573 && ! CONSTANT_P (tem))
4574 tem = copy_addr_to_reg (tem);
4575 return tem;
4576 }
4577 }
4578
4579 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4580 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4581 is the same as for allocate_dynamic_stack_space. */
4582
4583 static rtx
4584 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4585 {
4586 rtx op0;
4587 rtx result;
4588 bool valid_arglist;
4589 unsigned int align;
4590 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4591 == BUILT_IN_ALLOCA_WITH_ALIGN);
4592
4593 valid_arglist
4594 = (alloca_with_align
4595 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4596 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4597
4598 if (!valid_arglist)
4599 return NULL_RTX;
4600
4601 /* Compute the argument. */
4602 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4603
4604 /* Compute the alignment. */
4605 align = (alloca_with_align
4606 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4607 : BIGGEST_ALIGNMENT);
4608
4609 /* Allocate the desired space. */
4610 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4611 result = convert_memory_address (ptr_mode, result);
4612
4613 return result;
4614 }
4615
4616 /* Expand a call to bswap builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
4620
4621 static rtx
4622 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4623 rtx subtarget)
4624 {
4625 tree arg;
4626 rtx op0;
4627
4628 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4630
4631 arg = CALL_EXPR_ARG (exp, 0);
4632 op0 = expand_expr (arg,
4633 subtarget && GET_MODE (subtarget) == target_mode
4634 ? subtarget : NULL_RTX,
4635 target_mode, EXPAND_NORMAL);
4636 if (GET_MODE (op0) != target_mode)
4637 op0 = convert_to_mode (target_mode, op0, 1);
4638
4639 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4640
4641 gcc_assert (target);
4642
4643 return convert_to_mode (target_mode, target, 1);
4644 }
4645
4646 /* Expand a call to a unary builtin in EXP.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding the
4648 function in-line. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing one of EXP's operands. */
4650
4651 static rtx
4652 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4653 rtx subtarget, optab op_optab)
4654 {
4655 rtx op0;
4656
4657 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4659
4660 /* Compute the argument. */
4661 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4662 (subtarget
4663 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4664 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4665 VOIDmode, EXPAND_NORMAL);
4666 /* Compute op, into TARGET if possible.
4667 Set TARGET to wherever the result comes back. */
4668 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4669 op_optab, op0, target, op_optab != clrsb_optab);
4670 gcc_assert (target);
4671
4672 return convert_to_mode (target_mode, target, 0);
4673 }
4674
4675 /* Expand a call to __builtin_expect. We just return our argument
4676 as the builtin_expect semantic should've been already executed by
4677 tree branch prediction pass. */
4678
4679 static rtx
4680 expand_builtin_expect (tree exp, rtx target)
4681 {
4682 tree arg;
4683
4684 if (call_expr_nargs (exp) < 2)
4685 return const0_rtx;
4686 arg = CALL_EXPR_ARG (exp, 0);
4687
4688 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4689 /* When guessing was done, the hints should be already stripped away. */
4690 gcc_assert (!flag_guess_branch_prob
4691 || optimize == 0 || seen_error ());
4692 return target;
4693 }
4694
4695 /* Expand a call to __builtin_assume_aligned. We just return our first
4696 argument as the builtin_assume_aligned semantic should've been already
4697 executed by CCP. */
4698
4699 static rtx
4700 expand_builtin_assume_aligned (tree exp, rtx target)
4701 {
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4705 EXPAND_NORMAL);
4706 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4707 && (call_expr_nargs (exp) < 3
4708 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4709 return target;
4710 }
4711
4712 void
4713 expand_builtin_trap (void)
4714 {
4715 if (targetm.have_trap ())
4716 {
4717 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4718 /* For trap insns when not accumulating outgoing args force
4719 REG_ARGS_SIZE note to prevent crossjumping of calls with
4720 different args sizes. */
4721 if (!ACCUMULATE_OUTGOING_ARGS)
4722 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4723 }
4724 else
4725 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4726 emit_barrier ();
4727 }
4728
4729 /* Expand a call to __builtin_unreachable. We do nothing except emit
4730 a barrier saying that control flow will not pass here.
4731
4732 It is the responsibility of the program being compiled to ensure
4733 that control flow does never reach __builtin_unreachable. */
4734 static void
4735 expand_builtin_unreachable (void)
4736 {
4737 emit_barrier ();
4738 }
4739
4740 /* Expand EXP, a call to fabs, fabsf or fabsl.
4741 Return NULL_RTX if a normal call should be emitted rather than expanding
4742 the function inline. If convenient, the result should be placed
4743 in TARGET. SUBTARGET may be used as the target for computing
4744 the operand. */
4745
4746 static rtx
4747 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4748 {
4749 machine_mode mode;
4750 tree arg;
4751 rtx op0;
4752
4753 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
4755
4756 arg = CALL_EXPR_ARG (exp, 0);
4757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4758 mode = TYPE_MODE (TREE_TYPE (arg));
4759 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4760 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4761 }
4762
4763 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4764 Return NULL is a normal call should be emitted rather than expanding the
4765 function inline. If convenient, the result should be placed in TARGET.
4766 SUBTARGET may be used as the target for computing the operand. */
4767
4768 static rtx
4769 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4770 {
4771 rtx op0, op1;
4772 tree arg;
4773
4774 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
4776
4777 arg = CALL_EXPR_ARG (exp, 0);
4778 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4779
4780 arg = CALL_EXPR_ARG (exp, 1);
4781 op1 = expand_normal (arg);
4782
4783 return expand_copysign (op0, op1, target);
4784 }
4785
4786 /* Expand a call to __builtin___clear_cache. */
4787
4788 static rtx
4789 expand_builtin___clear_cache (tree exp)
4790 {
4791 if (!targetm.code_for_clear_cache)
4792 {
4793 #ifdef CLEAR_INSN_CACHE
4794 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4795 does something. Just do the default expansion to a call to
4796 __clear_cache(). */
4797 return NULL_RTX;
4798 #else
4799 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4800 does nothing. There is no need to call it. Do nothing. */
4801 return const0_rtx;
4802 #endif /* CLEAR_INSN_CACHE */
4803 }
4804
4805 /* We have a "clear_cache" insn, and it will handle everything. */
4806 tree begin, end;
4807 rtx begin_rtx, end_rtx;
4808
4809 /* We must not expand to a library call. If we did, any
4810 fallback library function in libgcc that might contain a call to
4811 __builtin___clear_cache() would recurse infinitely. */
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4813 {
4814 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4815 return const0_rtx;
4816 }
4817
4818 if (targetm.have_clear_cache ())
4819 {
4820 struct expand_operand ops[2];
4821
4822 begin = CALL_EXPR_ARG (exp, 0);
4823 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4824
4825 end = CALL_EXPR_ARG (exp, 1);
4826 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4827
4828 create_address_operand (&ops[0], begin_rtx);
4829 create_address_operand (&ops[1], end_rtx);
4830 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4831 return const0_rtx;
4832 }
4833 return const0_rtx;
4834 }
4835
4836 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4837
4838 static rtx
4839 round_trampoline_addr (rtx tramp)
4840 {
4841 rtx temp, addend, mask;
4842
4843 /* If we don't need too much alignment, we'll have been guaranteed
4844 proper alignment by get_trampoline_type. */
4845 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4846 return tramp;
4847
4848 /* Round address up to desired boundary. */
4849 temp = gen_reg_rtx (Pmode);
4850 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4851 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4852
4853 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4856 temp, 0, OPTAB_LIB_WIDEN);
4857
4858 return tramp;
4859 }
4860
4861 static rtx
4862 expand_builtin_init_trampoline (tree exp, bool onstack)
4863 {
4864 tree t_tramp, t_func, t_chain;
4865 rtx m_tramp, r_tramp, r_chain, tmp;
4866
4867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4868 POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4870
4871 t_tramp = CALL_EXPR_ARG (exp, 0);
4872 t_func = CALL_EXPR_ARG (exp, 1);
4873 t_chain = CALL_EXPR_ARG (exp, 2);
4874
4875 r_tramp = expand_normal (t_tramp);
4876 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4877 MEM_NOTRAP_P (m_tramp) = 1;
4878
4879 /* If ONSTACK, the TRAMP argument should be the address of a field
4880 within the local function's FRAME decl. Either way, let's see if
4881 we can fill in the MEM_ATTRs for this memory. */
4882 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4883 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4884
4885 /* Creator of a heap trampoline is responsible for making sure the
4886 address is aligned to at least STACK_BOUNDARY. Normally malloc
4887 will ensure this anyhow. */
4888 tmp = round_trampoline_addr (r_tramp);
4889 if (tmp != r_tramp)
4890 {
4891 m_tramp = change_address (m_tramp, BLKmode, tmp);
4892 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4893 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4894 }
4895
4896 /* The FUNC argument should be the address of the nested function.
4897 Extract the actual function decl to pass to the hook. */
4898 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4899 t_func = TREE_OPERAND (t_func, 0);
4900 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4901
4902 r_chain = expand_normal (t_chain);
4903
4904 /* Generate insns to initialize the trampoline. */
4905 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4906
4907 if (onstack)
4908 {
4909 trampolines_created = 1;
4910
4911 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4912 "trampoline generated for nested function %qD", t_func);
4913 }
4914
4915 return const0_rtx;
4916 }
4917
4918 static rtx
4919 expand_builtin_adjust_trampoline (tree exp)
4920 {
4921 rtx tramp;
4922
4923 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4924 return NULL_RTX;
4925
4926 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4927 tramp = round_trampoline_addr (tramp);
4928 if (targetm.calls.trampoline_adjust_address)
4929 tramp = targetm.calls.trampoline_adjust_address (tramp);
4930
4931 return tramp;
4932 }
4933
4934 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4935 function. The function first checks whether the back end provides
4936 an insn to implement signbit for the respective mode. If not, it
4937 checks whether the floating point format of the value is such that
4938 the sign bit can be extracted. If that is not the case, error out.
4939 EXP is the expression that is a call to the builtin function; if
4940 convenient, the result should be placed in TARGET. */
4941 static rtx
4942 expand_builtin_signbit (tree exp, rtx target)
4943 {
4944 const struct real_format *fmt;
4945 machine_mode fmode, imode, rmode;
4946 tree arg;
4947 int word, bitpos;
4948 enum insn_code icode;
4949 rtx temp;
4950 location_t loc = EXPR_LOCATION (exp);
4951
4952 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4953 return NULL_RTX;
4954
4955 arg = CALL_EXPR_ARG (exp, 0);
4956 fmode = TYPE_MODE (TREE_TYPE (arg));
4957 rmode = TYPE_MODE (TREE_TYPE (exp));
4958 fmt = REAL_MODE_FORMAT (fmode);
4959
4960 arg = builtin_save_expr (arg);
4961
4962 /* Expand the argument yielding a RTX expression. */
4963 temp = expand_normal (arg);
4964
4965 /* Check if the back end provides an insn that handles signbit for the
4966 argument's mode. */
4967 icode = optab_handler (signbit_optab, fmode);
4968 if (icode != CODE_FOR_nothing)
4969 {
4970 rtx_insn *last = get_last_insn ();
4971 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4972 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4973 return target;
4974 delete_insns_since (last);
4975 }
4976
4977 /* For floating point formats without a sign bit, implement signbit
4978 as "ARG < 0.0". */
4979 bitpos = fmt->signbit_ro;
4980 if (bitpos < 0)
4981 {
4982 /* But we can't do this if the format supports signed zero. */
4983 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4984
4985 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4986 build_real (TREE_TYPE (arg), dconst0));
4987 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4988 }
4989
4990 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4991 {
4992 imode = int_mode_for_mode (fmode);
4993 gcc_assert (imode != BLKmode);
4994 temp = gen_lowpart (imode, temp);
4995 }
4996 else
4997 {
4998 imode = word_mode;
4999 /* Handle targets with different FP word orders. */
5000 if (FLOAT_WORDS_BIG_ENDIAN)
5001 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5002 else
5003 word = bitpos / BITS_PER_WORD;
5004 temp = operand_subword_force (temp, word, fmode);
5005 bitpos = bitpos % BITS_PER_WORD;
5006 }
5007
5008 /* Force the intermediate word_mode (or narrower) result into a
5009 register. This avoids attempting to create paradoxical SUBREGs
5010 of floating point modes below. */
5011 temp = force_reg (imode, temp);
5012
5013 /* If the bitpos is within the "result mode" lowpart, the operation
5014 can be implement with a single bitwise AND. Otherwise, we need
5015 a right shift and an AND. */
5016
5017 if (bitpos < GET_MODE_BITSIZE (rmode))
5018 {
5019 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5020
5021 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5022 temp = gen_lowpart (rmode, temp);
5023 temp = expand_binop (rmode, and_optab, temp,
5024 immed_wide_int_const (mask, rmode),
5025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5026 }
5027 else
5028 {
5029 /* Perform a logical right shift to place the signbit in the least
5030 significant bit, then truncate the result to the desired mode
5031 and mask just this bit. */
5032 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5035 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5036 }
5037
5038 return temp;
5039 }
5040
5041 /* Expand fork or exec calls. TARGET is the desired target of the
5042 call. EXP is the call. FN is the
5043 identificator of the actual function. IGNORE is nonzero if the
5044 value is to be ignored. */
5045
5046 static rtx
5047 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5048 {
5049 tree id, decl;
5050 tree call;
5051
5052 /* If we are not profiling, just call the function. */
5053 if (!profile_arc_flag)
5054 return NULL_RTX;
5055
5056 /* Otherwise call the wrapper. This should be equivalent for the rest of
5057 compiler, so the code does not diverge, and the wrapper may run the
5058 code necessary for keeping the profiling sane. */
5059
5060 switch (DECL_FUNCTION_CODE (fn))
5061 {
5062 case BUILT_IN_FORK:
5063 id = get_identifier ("__gcov_fork");
5064 break;
5065
5066 case BUILT_IN_EXECL:
5067 id = get_identifier ("__gcov_execl");
5068 break;
5069
5070 case BUILT_IN_EXECV:
5071 id = get_identifier ("__gcov_execv");
5072 break;
5073
5074 case BUILT_IN_EXECLP:
5075 id = get_identifier ("__gcov_execlp");
5076 break;
5077
5078 case BUILT_IN_EXECLE:
5079 id = get_identifier ("__gcov_execle");
5080 break;
5081
5082 case BUILT_IN_EXECVP:
5083 id = get_identifier ("__gcov_execvp");
5084 break;
5085
5086 case BUILT_IN_EXECVE:
5087 id = get_identifier ("__gcov_execve");
5088 break;
5089
5090 default:
5091 gcc_unreachable ();
5092 }
5093
5094 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5095 FUNCTION_DECL, id, TREE_TYPE (fn));
5096 DECL_EXTERNAL (decl) = 1;
5097 TREE_PUBLIC (decl) = 1;
5098 DECL_ARTIFICIAL (decl) = 1;
5099 TREE_NOTHROW (decl) = 1;
5100 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5101 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5102 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5103 return expand_call (call, target, ignore);
5104 }
5105
5106
5107 \f
5108 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5109 the pointer in these functions is void*, the tree optimizers may remove
5110 casts. The mode computed in expand_builtin isn't reliable either, due
5111 to __sync_bool_compare_and_swap.
5112
5113 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5114 group of builtins. This gives us log2 of the mode size. */
5115
5116 static inline machine_mode
5117 get_builtin_sync_mode (int fcode_diff)
5118 {
5119 /* The size is not negotiable, so ask not to get BLKmode in return
5120 if the target indicates that a smaller size would be better. */
5121 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5122 }
5123
5124 /* Expand the memory expression LOC and return the appropriate memory operand
5125 for the builtin_sync operations. */
5126
5127 static rtx
5128 get_builtin_sync_mem (tree loc, machine_mode mode)
5129 {
5130 rtx addr, mem;
5131
5132 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5133 addr = convert_memory_address (Pmode, addr);
5134
5135 /* Note that we explicitly do not want any alias information for this
5136 memory, so that we kill all other live memories. Otherwise we don't
5137 satisfy the full barrier semantics of the intrinsic. */
5138 mem = validize_mem (gen_rtx_MEM (mode, addr));
5139
5140 /* The alignment needs to be at least according to that of the mode. */
5141 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5142 get_pointer_alignment (loc)));
5143 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5144 MEM_VOLATILE_P (mem) = 1;
5145
5146 return mem;
5147 }
5148
5149 /* Make sure an argument is in the right mode.
5150 EXP is the tree argument.
5151 MODE is the mode it should be in. */
5152
5153 static rtx
5154 expand_expr_force_mode (tree exp, machine_mode mode)
5155 {
5156 rtx val;
5157 machine_mode old_mode;
5158
5159 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5162
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (exp));
5166 val = convert_modes (mode, old_mode, val, 1);
5167 return val;
5168 }
5169
5170
5171 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5172 EXP is the CALL_EXPR. CODE is the rtx code
5173 that corresponds to the arithmetic or logical operation from the name;
5174 an exception here is that NOT actually means NAND. TARGET is an optional
5175 place for us to store the results; AFTER is true if this is the
5176 fetch_and_xxx form. */
5177
5178 static rtx
5179 expand_builtin_sync_operation (machine_mode mode, tree exp,
5180 enum rtx_code code, bool after,
5181 rtx target)
5182 {
5183 rtx val, mem;
5184 location_t loc = EXPR_LOCATION (exp);
5185
5186 if (code == NOT && warn_sync_nand)
5187 {
5188 tree fndecl = get_callee_fndecl (exp);
5189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5190
5191 static bool warned_f_a_n, warned_n_a_f;
5192
5193 switch (fcode)
5194 {
5195 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5198 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5199 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5200 if (warned_f_a_n)
5201 break;
5202
5203 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5204 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5205 warned_f_a_n = true;
5206 break;
5207
5208 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5211 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5212 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5213 if (warned_n_a_f)
5214 break;
5215
5216 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5217 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5218 warned_n_a_f = true;
5219 break;
5220
5221 default:
5222 gcc_unreachable ();
5223 }
5224 }
5225
5226 /* Expand the operands. */
5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5229
5230 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5231 after);
5232 }
5233
5234 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5235 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5236 true if this is the boolean form. TARGET is a place for us to store the
5237 results; this is NOT optional if IS_BOOL is true. */
5238
5239 static rtx
5240 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5241 bool is_bool, rtx target)
5242 {
5243 rtx old_val, new_val, mem;
5244 rtx *pbool, *poval;
5245
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5250
5251 pbool = poval = NULL;
5252 if (target != const0_rtx)
5253 {
5254 if (is_bool)
5255 pbool = &target;
5256 else
5257 poval = &target;
5258 }
5259 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5260 false, MEMMODEL_SYNC_SEQ_CST,
5261 MEMMODEL_SYNC_SEQ_CST))
5262 return NULL_RTX;
5263
5264 return target;
5265 }
5266
5267 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5268 general form is actually an atomic exchange, and some targets only
5269 support a reduced form with the second argument being a constant 1.
5270 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5271 the results. */
5272
5273 static rtx
5274 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5275 rtx target)
5276 {
5277 rtx val, mem;
5278
5279 /* Expand the operands. */
5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5281 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5282
5283 return expand_sync_lock_test_and_set (target, mem, val);
5284 }
5285
5286 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5287
5288 static void
5289 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5290 {
5291 rtx mem;
5292
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5297 }
5298
5299 /* Given an integer representing an ``enum memmodel'', verify its
5300 correctness and return the memory model enum. */
5301
5302 static enum memmodel
5303 get_memmodel (tree exp)
5304 {
5305 rtx op;
5306 unsigned HOST_WIDE_INT val;
5307
5308 /* If the parameter is not a constant, it's a run time value so we'll just
5309 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5310 if (TREE_CODE (exp) != INTEGER_CST)
5311 return MEMMODEL_SEQ_CST;
5312
5313 op = expand_normal (exp);
5314
5315 val = INTVAL (op);
5316 if (targetm.memmodel_check)
5317 val = targetm.memmodel_check (val);
5318 else if (val & ~MEMMODEL_MASK)
5319 {
5320 warning (OPT_Winvalid_memory_model,
5321 "Unknown architecture specifier in memory model to builtin.");
5322 return MEMMODEL_SEQ_CST;
5323 }
5324
5325 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5326 if (memmodel_base (val) >= MEMMODEL_LAST)
5327 {
5328 warning (OPT_Winvalid_memory_model,
5329 "invalid memory model argument to builtin");
5330 return MEMMODEL_SEQ_CST;
5331 }
5332
5333 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5334 be conservative and promote consume to acquire. */
5335 if (val == MEMMODEL_CONSUME)
5336 val = MEMMODEL_ACQUIRE;
5337
5338 return (enum memmodel) val;
5339 }
5340
5341 /* Expand the __atomic_exchange intrinsic:
5342 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results. */
5345
5346 static rtx
5347 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5348 {
5349 rtx val, mem;
5350 enum memmodel model;
5351
5352 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5353
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5356
5357 /* Expand the operands. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5360
5361 return expand_atomic_exchange (target, mem, val, model);
5362 }
5363
5364 /* Expand the __atomic_compare_exchange intrinsic:
5365 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5366 TYPE desired, BOOL weak,
5367 enum memmodel success,
5368 enum memmodel failure)
5369 EXP is the CALL_EXPR.
5370 TARGET is an optional place for us to store the results. */
5371
5372 static rtx
5373 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5374 rtx target)
5375 {
5376 rtx expect, desired, mem, oldval;
5377 rtx_code_label *label;
5378 enum memmodel success, failure;
5379 tree weak;
5380 bool is_weak;
5381
5382 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5383 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5384
5385 if (failure > success)
5386 {
5387 warning (OPT_Winvalid_memory_model,
5388 "failure memory model cannot be stronger than success memory "
5389 "model for %<__atomic_compare_exchange%>");
5390 success = MEMMODEL_SEQ_CST;
5391 }
5392
5393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5394 {
5395 warning (OPT_Winvalid_memory_model,
5396 "invalid failure memory model for "
5397 "%<__atomic_compare_exchange%>");
5398 failure = MEMMODEL_SEQ_CST;
5399 success = MEMMODEL_SEQ_CST;
5400 }
5401
5402
5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5405
5406 /* Expand the operands. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5408
5409 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5410 expect = convert_memory_address (Pmode, expect);
5411 expect = gen_rtx_MEM (mode, expect);
5412 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5413
5414 weak = CALL_EXPR_ARG (exp, 3);
5415 is_weak = false;
5416 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5417 is_weak = true;
5418
5419 if (target == const0_rtx)
5420 target = NULL;
5421
5422 /* Lest the rtl backend create a race condition with an imporoper store
5423 to memory, always create a new pseudo for OLDVAL. */
5424 oldval = NULL;
5425
5426 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5427 is_weak, success, failure))
5428 return NULL_RTX;
5429
5430 /* Conditionally store back to EXPECT, lest we create a race condition
5431 with an improper store to memory. */
5432 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5433 the normal case where EXPECT is totally private, i.e. a register. At
5434 which point the store can be unconditional. */
5435 label = gen_label_rtx ();
5436 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5437 GET_MODE (target), 1, label);
5438 emit_move_insn (expect, oldval);
5439 emit_label (label);
5440
5441 return target;
5442 }
5443
5444 /* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5448
5449 static rtx
5450 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5451 {
5452 rtx mem;
5453 enum memmodel model;
5454
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5456 if (is_mm_release (model) || is_mm_acq_rel (model))
5457 {
5458 warning (OPT_Winvalid_memory_model,
5459 "invalid memory model for %<__atomic_load%>");
5460 model = MEMMODEL_SEQ_CST;
5461 }
5462
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5465
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468
5469 return expand_atomic_load (target, mem, model);
5470 }
5471
5472
5473 /* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5477
5478 static rtx
5479 expand_builtin_atomic_store (machine_mode mode, tree exp)
5480 {
5481 rtx mem, val;
5482 enum memmodel model;
5483
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5485 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5486 || is_mm_release (model)))
5487 {
5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_store%>");
5490 model = MEMMODEL_SEQ_CST;
5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5499
5500 return expand_atomic_store (mem, val, model, false);
5501 }
5502
5503 /* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5508 FETCH_AFTER is true if returning the result of the operation.
5509 FETCH_AFTER is false if returning the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5513
5514 static rtx
5515 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5518 {
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5523
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5532 {
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5536 }
5537
5538 /* Return if a different routine isn't needed for the library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5541
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5546
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5549
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5552
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5555
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
5558 {
5559 if (code == NOT)
5560 {
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5564 }
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5568 }
5569 return ret;
5570 }
5571
5572 /* Expand an atomic clear operation.
5573 void _atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5575
5576 static rtx
5577 expand_builtin_atomic_clear (tree exp)
5578 {
5579 machine_mode mode;
5580 rtx mem, ret;
5581 enum memmodel model;
5582
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5586
5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5588 {
5589 warning (OPT_Winvalid_memory_model,
5590 "invalid memory model for %<__atomic_store%>");
5591 model = MEMMODEL_SEQ_CST;
5592 }
5593
5594 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5595 Failing that, a store is issued by __atomic_store. The only way this can
5596 fail is if the bool type is larger than a word size. Unlikely, but
5597 handle it anyway for completeness. Assume a single threaded model since
5598 there is no atomic support in this case, and no barriers are required. */
5599 ret = expand_atomic_store (mem, const0_rtx, model, true);
5600 if (!ret)
5601 emit_move_insn (mem, const0_rtx);
5602 return const0_rtx;
5603 }
5604
5605 /* Expand an atomic test_and_set operation.
5606 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5607 EXP is the call expression. */
5608
5609 static rtx
5610 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5611 {
5612 rtx mem;
5613 enum memmodel model;
5614 machine_mode mode;
5615
5616 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5619
5620 return expand_atomic_test_and_set (target, mem, model);
5621 }
5622
5623
5624 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5625 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5626
5627 static tree
5628 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5629 {
5630 int size;
5631 machine_mode mode;
5632 unsigned int mode_align, type_align;
5633
5634 if (TREE_CODE (arg0) != INTEGER_CST)
5635 return NULL_TREE;
5636
5637 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5638 mode = mode_for_size (size, MODE_INT, 0);
5639 mode_align = GET_MODE_ALIGNMENT (mode);
5640
5641 if (TREE_CODE (arg1) == INTEGER_CST)
5642 {
5643 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5644
5645 /* Either this argument is null, or it's a fake pointer encoding
5646 the alignment of the object. */
5647 val = val & -val;
5648 val *= BITS_PER_UNIT;
5649
5650 if (val == 0 || mode_align < val)
5651 type_align = mode_align;
5652 else
5653 type_align = val;
5654 }
5655 else
5656 {
5657 tree ttype = TREE_TYPE (arg1);
5658
5659 /* This function is usually invoked and folded immediately by the front
5660 end before anything else has a chance to look at it. The pointer
5661 parameter at this point is usually cast to a void *, so check for that
5662 and look past the cast. */
5663 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5664 && VOID_TYPE_P (TREE_TYPE (ttype)))
5665 arg1 = TREE_OPERAND (arg1, 0);
5666
5667 ttype = TREE_TYPE (arg1);
5668 gcc_assert (POINTER_TYPE_P (ttype));
5669
5670 /* Get the underlying type of the object. */
5671 ttype = TREE_TYPE (ttype);
5672 type_align = TYPE_ALIGN (ttype);
5673 }
5674
5675 /* If the object has smaller alignment, the lock free routines cannot
5676 be used. */
5677 if (type_align < mode_align)
5678 return boolean_false_node;
5679
5680 /* Check if a compare_and_swap pattern exists for the mode which represents
5681 the required size. The pattern is not allowed to fail, so the existence
5682 of the pattern indicates support is present. */
5683 if (can_compare_and_swap_p (mode, true))
5684 return boolean_true_node;
5685 else
5686 return boolean_false_node;
5687 }
5688
5689 /* Return true if the parameters to call EXP represent an object which will
5690 always generate lock free instructions. The first argument represents the
5691 size of the object, and the second parameter is a pointer to the object
5692 itself. If NULL is passed for the object, then the result is based on
5693 typical alignment for an object of the specified size. Otherwise return
5694 false. */
5695
5696 static rtx
5697 expand_builtin_atomic_always_lock_free (tree exp)
5698 {
5699 tree size;
5700 tree arg0 = CALL_EXPR_ARG (exp, 0);
5701 tree arg1 = CALL_EXPR_ARG (exp, 1);
5702
5703 if (TREE_CODE (arg0) != INTEGER_CST)
5704 {
5705 error ("non-constant argument 1 to __atomic_always_lock_free");
5706 return const0_rtx;
5707 }
5708
5709 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5710 if (size == boolean_true_node)
5711 return const1_rtx;
5712 return const0_rtx;
5713 }
5714
5715 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5716 is lock free on this architecture. */
5717
5718 static tree
5719 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5720 {
5721 if (!flag_inline_atomics)
5722 return NULL_TREE;
5723
5724 /* If it isn't always lock free, don't generate a result. */
5725 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5726 return boolean_true_node;
5727
5728 return NULL_TREE;
5729 }
5730
5731 /* Return true if the parameters to call EXP represent an object which will
5732 always generate lock free instructions. The first argument represents the
5733 size of the object, and the second parameter is a pointer to the object
5734 itself. If NULL is passed for the object, then the result is based on
5735 typical alignment for an object of the specified size. Otherwise return
5736 NULL*/
5737
5738 static rtx
5739 expand_builtin_atomic_is_lock_free (tree exp)
5740 {
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5744
5745 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5746 {
5747 error ("non-integer argument 1 to __atomic_is_lock_free");
5748 return NULL_RTX;
5749 }
5750
5751 if (!flag_inline_atomics)
5752 return NULL_RTX;
5753
5754 /* If the value is known at compile time, return the RTX for it. */
5755 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5756 if (size == boolean_true_node)
5757 return const1_rtx;
5758
5759 return NULL_RTX;
5760 }
5761
5762 /* Expand the __atomic_thread_fence intrinsic:
5763 void __atomic_thread_fence (enum memmodel)
5764 EXP is the CALL_EXPR. */
5765
5766 static void
5767 expand_builtin_atomic_thread_fence (tree exp)
5768 {
5769 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5770 expand_mem_thread_fence (model);
5771 }
5772
5773 /* Expand the __atomic_signal_fence intrinsic:
5774 void __atomic_signal_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5776
5777 static void
5778 expand_builtin_atomic_signal_fence (tree exp)
5779 {
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_signal_fence (model);
5782 }
5783
5784 /* Expand the __sync_synchronize intrinsic. */
5785
5786 static void
5787 expand_builtin_sync_synchronize (void)
5788 {
5789 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5790 }
5791
5792 static rtx
5793 expand_builtin_thread_pointer (tree exp, rtx target)
5794 {
5795 enum insn_code icode;
5796 if (!validate_arglist (exp, VOID_TYPE))
5797 return const0_rtx;
5798 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5799 if (icode != CODE_FOR_nothing)
5800 {
5801 struct expand_operand op;
5802 /* If the target is not sutitable then create a new target. */
5803 if (target == NULL_RTX
5804 || !REG_P (target)
5805 || GET_MODE (target) != Pmode)
5806 target = gen_reg_rtx (Pmode);
5807 create_output_operand (&op, target, Pmode);
5808 expand_insn (icode, 1, &op);
5809 return target;
5810 }
5811 error ("__builtin_thread_pointer is not supported on this target");
5812 return const0_rtx;
5813 }
5814
5815 static void
5816 expand_builtin_set_thread_pointer (tree exp)
5817 {
5818 enum insn_code icode;
5819 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5820 return;
5821 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5822 if (icode != CODE_FOR_nothing)
5823 {
5824 struct expand_operand op;
5825 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5826 Pmode, EXPAND_NORMAL);
5827 create_input_operand (&op, val, Pmode);
5828 expand_insn (icode, 1, &op);
5829 return;
5830 }
5831 error ("__builtin_set_thread_pointer is not supported on this target");
5832 }
5833
5834 \f
5835 /* Emit code to restore the current value of stack. */
5836
5837 static void
5838 expand_stack_restore (tree var)
5839 {
5840 rtx_insn *prev;
5841 rtx sa = expand_normal (var);
5842
5843 sa = convert_memory_address (Pmode, sa);
5844
5845 prev = get_last_insn ();
5846 emit_stack_restore (SAVE_BLOCK, sa);
5847
5848 record_new_stack_level ();
5849
5850 fixup_args_size_notes (prev, get_last_insn (), 0);
5851 }
5852
5853 /* Emit code to save the current value of stack. */
5854
5855 static rtx
5856 expand_stack_save (void)
5857 {
5858 rtx ret = NULL_RTX;
5859
5860 emit_stack_save (SAVE_BLOCK, &ret);
5861 return ret;
5862 }
5863
5864
5865 /* Expand an expression EXP that calls a built-in function,
5866 with result going to TARGET if that's convenient
5867 (and in mode MODE if that's convenient).
5868 SUBTARGET may be used as the target for computing one of EXP's operands.
5869 IGNORE is nonzero if the value is to be ignored. */
5870
5871 rtx
5872 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5873 int ignore)
5874 {
5875 tree fndecl = get_callee_fndecl (exp);
5876 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5877 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5878 int flags;
5879
5880 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5881 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5882
5883 /* When ASan is enabled, we don't want to expand some memory/string
5884 builtins and rely on libsanitizer's hooks. This allows us to avoid
5885 redundant checks and be sure, that possible overflow will be detected
5886 by ASan. */
5887
5888 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5889 return expand_call (exp, target, ignore);
5890
5891 /* When not optimizing, generate calls to library functions for a certain
5892 set of builtins. */
5893 if (!optimize
5894 && !called_as_built_in (fndecl)
5895 && fcode != BUILT_IN_FORK
5896 && fcode != BUILT_IN_EXECL
5897 && fcode != BUILT_IN_EXECV
5898 && fcode != BUILT_IN_EXECLP
5899 && fcode != BUILT_IN_EXECLE
5900 && fcode != BUILT_IN_EXECVP
5901 && fcode != BUILT_IN_EXECVE
5902 && fcode != BUILT_IN_ALLOCA
5903 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5904 && fcode != BUILT_IN_FREE
5905 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5906 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5907 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5908 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5909 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5910 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5912 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5913 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5915 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5916 && fcode != BUILT_IN_CHKP_BNDRET)
5917 return expand_call (exp, target, ignore);
5918
5919 /* The built-in function expanders test for target == const0_rtx
5920 to determine whether the function's result will be ignored. */
5921 if (ignore)
5922 target = const0_rtx;
5923
5924 /* If the result of a pure or const built-in function is ignored, and
5925 none of its arguments are volatile, we can avoid expanding the
5926 built-in call and just evaluate the arguments for side-effects. */
5927 if (target == const0_rtx
5928 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5929 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5930 {
5931 bool volatilep = false;
5932 tree arg;
5933 call_expr_arg_iterator iter;
5934
5935 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5936 if (TREE_THIS_VOLATILE (arg))
5937 {
5938 volatilep = true;
5939 break;
5940 }
5941
5942 if (! volatilep)
5943 {
5944 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5945 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5946 return const0_rtx;
5947 }
5948 }
5949
5950 /* expand_builtin_with_bounds is supposed to be used for
5951 instrumented builtin calls. */
5952 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5953
5954 switch (fcode)
5955 {
5956 CASE_FLT_FN (BUILT_IN_FABS):
5957 case BUILT_IN_FABSD32:
5958 case BUILT_IN_FABSD64:
5959 case BUILT_IN_FABSD128:
5960 target = expand_builtin_fabs (exp, target, subtarget);
5961 if (target)
5962 return target;
5963 break;
5964
5965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5966 target = expand_builtin_copysign (exp, target, subtarget);
5967 if (target)
5968 return target;
5969 break;
5970
5971 /* Just do a normal library call if we were unable to fold
5972 the values. */
5973 CASE_FLT_FN (BUILT_IN_CABS):
5974 break;
5975
5976 CASE_FLT_FN (BUILT_IN_EXP):
5977 CASE_FLT_FN (BUILT_IN_EXP10):
5978 CASE_FLT_FN (BUILT_IN_POW10):
5979 CASE_FLT_FN (BUILT_IN_EXP2):
5980 CASE_FLT_FN (BUILT_IN_EXPM1):
5981 CASE_FLT_FN (BUILT_IN_LOGB):
5982 CASE_FLT_FN (BUILT_IN_LOG):
5983 CASE_FLT_FN (BUILT_IN_LOG10):
5984 CASE_FLT_FN (BUILT_IN_LOG2):
5985 CASE_FLT_FN (BUILT_IN_LOG1P):
5986 CASE_FLT_FN (BUILT_IN_TAN):
5987 CASE_FLT_FN (BUILT_IN_ASIN):
5988 CASE_FLT_FN (BUILT_IN_ACOS):
5989 CASE_FLT_FN (BUILT_IN_ATAN):
5990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5991 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5992 because of possible accuracy problems. */
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 CASE_FLT_FN (BUILT_IN_SQRT):
5996 CASE_FLT_FN (BUILT_IN_FLOOR):
5997 CASE_FLT_FN (BUILT_IN_CEIL):
5998 CASE_FLT_FN (BUILT_IN_TRUNC):
5999 CASE_FLT_FN (BUILT_IN_ROUND):
6000 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6001 CASE_FLT_FN (BUILT_IN_RINT):
6002 target = expand_builtin_mathfn (exp, target, subtarget);
6003 if (target)
6004 return target;
6005 break;
6006
6007 CASE_FLT_FN (BUILT_IN_FMA):
6008 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6009 if (target)
6010 return target;
6011 break;
6012
6013 CASE_FLT_FN (BUILT_IN_ILOGB):
6014 if (! flag_unsafe_math_optimizations)
6015 break;
6016 CASE_FLT_FN (BUILT_IN_ISINF):
6017 CASE_FLT_FN (BUILT_IN_FINITE):
6018 case BUILT_IN_ISFINITE:
6019 case BUILT_IN_ISNORMAL:
6020 target = expand_builtin_interclass_mathfn (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_ICEIL):
6026 CASE_FLT_FN (BUILT_IN_LCEIL):
6027 CASE_FLT_FN (BUILT_IN_LLCEIL):
6028 CASE_FLT_FN (BUILT_IN_LFLOOR):
6029 CASE_FLT_FN (BUILT_IN_IFLOOR):
6030 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6031 target = expand_builtin_int_roundingfn (exp, target);
6032 if (target)
6033 return target;
6034 break;
6035
6036 CASE_FLT_FN (BUILT_IN_IRINT):
6037 CASE_FLT_FN (BUILT_IN_LRINT):
6038 CASE_FLT_FN (BUILT_IN_LLRINT):
6039 CASE_FLT_FN (BUILT_IN_IROUND):
6040 CASE_FLT_FN (BUILT_IN_LROUND):
6041 CASE_FLT_FN (BUILT_IN_LLROUND):
6042 target = expand_builtin_int_roundingfn_2 (exp, target);
6043 if (target)
6044 return target;
6045 break;
6046
6047 CASE_FLT_FN (BUILT_IN_POWI):
6048 target = expand_builtin_powi (exp, target);
6049 if (target)
6050 return target;
6051 break;
6052
6053 CASE_FLT_FN (BUILT_IN_ATAN2):
6054 CASE_FLT_FN (BUILT_IN_LDEXP):
6055 CASE_FLT_FN (BUILT_IN_SCALB):
6056 CASE_FLT_FN (BUILT_IN_SCALBN):
6057 CASE_FLT_FN (BUILT_IN_SCALBLN):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060
6061 CASE_FLT_FN (BUILT_IN_FMOD):
6062 CASE_FLT_FN (BUILT_IN_REMAINDER):
6063 CASE_FLT_FN (BUILT_IN_DREM):
6064 CASE_FLT_FN (BUILT_IN_POW):
6065 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6069
6070 CASE_FLT_FN (BUILT_IN_CEXPI):
6071 target = expand_builtin_cexpi (exp, target);
6072 gcc_assert (target);
6073 return target;
6074
6075 CASE_FLT_FN (BUILT_IN_SIN):
6076 CASE_FLT_FN (BUILT_IN_COS):
6077 if (! flag_unsafe_math_optimizations)
6078 break;
6079 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_FLT_FN (BUILT_IN_SINCOS):
6085 if (! flag_unsafe_math_optimizations)
6086 break;
6087 target = expand_builtin_sincos (exp);
6088 if (target)
6089 return target;
6090 break;
6091
6092 case BUILT_IN_APPLY_ARGS:
6093 return expand_builtin_apply_args ();
6094
6095 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6096 FUNCTION with a copy of the parameters described by
6097 ARGUMENTS, and ARGSIZE. It returns a block of memory
6098 allocated on the stack into which is stored all the registers
6099 that might possibly be used for returning the result of a
6100 function. ARGUMENTS is the value returned by
6101 __builtin_apply_args. ARGSIZE is the number of bytes of
6102 arguments that must be copied. ??? How should this value be
6103 computed? We'll also need a safe worst case value for varargs
6104 functions. */
6105 case BUILT_IN_APPLY:
6106 if (!validate_arglist (exp, POINTER_TYPE,
6107 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6108 && !validate_arglist (exp, REFERENCE_TYPE,
6109 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6110 return const0_rtx;
6111 else
6112 {
6113 rtx ops[3];
6114
6115 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6116 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6117 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6118
6119 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6120 }
6121
6122 /* __builtin_return (RESULT) causes the function to return the
6123 value described by RESULT. RESULT is address of the block of
6124 memory returned by __builtin_apply. */
6125 case BUILT_IN_RETURN:
6126 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6127 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6128 return const0_rtx;
6129
6130 case BUILT_IN_SAVEREGS:
6131 return expand_builtin_saveregs ();
6132
6133 case BUILT_IN_VA_ARG_PACK:
6134 /* All valid uses of __builtin_va_arg_pack () are removed during
6135 inlining. */
6136 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6137 return const0_rtx;
6138
6139 case BUILT_IN_VA_ARG_PACK_LEN:
6140 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6141 inlining. */
6142 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6143 return const0_rtx;
6144
6145 /* Return the address of the first anonymous stack arg. */
6146 case BUILT_IN_NEXT_ARG:
6147 if (fold_builtin_next_arg (exp, false))
6148 return const0_rtx;
6149 return expand_builtin_next_arg ();
6150
6151 case BUILT_IN_CLEAR_CACHE:
6152 target = expand_builtin___clear_cache (exp);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_CLASSIFY_TYPE:
6158 return expand_builtin_classify_type (exp);
6159
6160 case BUILT_IN_CONSTANT_P:
6161 return const0_rtx;
6162
6163 case BUILT_IN_FRAME_ADDRESS:
6164 case BUILT_IN_RETURN_ADDRESS:
6165 return expand_builtin_frame_address (fndecl, exp);
6166
6167 /* Returns the address of the area where the structure is returned.
6168 0 otherwise. */
6169 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6170 if (call_expr_nargs (exp) != 0
6171 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6172 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6173 return const0_rtx;
6174 else
6175 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6176
6177 case BUILT_IN_ALLOCA:
6178 case BUILT_IN_ALLOCA_WITH_ALIGN:
6179 /* If the allocation stems from the declaration of a variable-sized
6180 object, it cannot accumulate. */
6181 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6182 if (target)
6183 return target;
6184 break;
6185
6186 case BUILT_IN_STACK_SAVE:
6187 return expand_stack_save ();
6188
6189 case BUILT_IN_STACK_RESTORE:
6190 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6191 return const0_rtx;
6192
6193 case BUILT_IN_BSWAP16:
6194 case BUILT_IN_BSWAP32:
6195 case BUILT_IN_BSWAP64:
6196 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6197 if (target)
6198 return target;
6199 break;
6200
6201 CASE_INT_FN (BUILT_IN_FFS):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, ffs_optab);
6204 if (target)
6205 return target;
6206 break;
6207
6208 CASE_INT_FN (BUILT_IN_CLZ):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, clz_optab);
6211 if (target)
6212 return target;
6213 break;
6214
6215 CASE_INT_FN (BUILT_IN_CTZ):
6216 target = expand_builtin_unop (target_mode, exp, target,
6217 subtarget, ctz_optab);
6218 if (target)
6219 return target;
6220 break;
6221
6222 CASE_INT_FN (BUILT_IN_CLRSB):
6223 target = expand_builtin_unop (target_mode, exp, target,
6224 subtarget, clrsb_optab);
6225 if (target)
6226 return target;
6227 break;
6228
6229 CASE_INT_FN (BUILT_IN_POPCOUNT):
6230 target = expand_builtin_unop (target_mode, exp, target,
6231 subtarget, popcount_optab);
6232 if (target)
6233 return target;
6234 break;
6235
6236 CASE_INT_FN (BUILT_IN_PARITY):
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, parity_optab);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_STRLEN:
6244 target = expand_builtin_strlen (exp, target, target_mode);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_STRCPY:
6250 target = expand_builtin_strcpy (exp, target);
6251 if (target)
6252 return target;
6253 break;
6254
6255 case BUILT_IN_STRNCPY:
6256 target = expand_builtin_strncpy (exp, target);
6257 if (target)
6258 return target;
6259 break;
6260
6261 case BUILT_IN_STPCPY:
6262 target = expand_builtin_stpcpy (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_MEMCPY:
6268 target = expand_builtin_memcpy (exp, target);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_MEMPCPY:
6274 target = expand_builtin_mempcpy (exp, target, mode);
6275 if (target)
6276 return target;
6277 break;
6278
6279 case BUILT_IN_MEMSET:
6280 target = expand_builtin_memset (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6284
6285 case BUILT_IN_BZERO:
6286 target = expand_builtin_bzero (exp);
6287 if (target)
6288 return target;
6289 break;
6290
6291 case BUILT_IN_STRCMP:
6292 target = expand_builtin_strcmp (exp, target);
6293 if (target)
6294 return target;
6295 break;
6296
6297 case BUILT_IN_STRNCMP:
6298 target = expand_builtin_strncmp (exp, target, mode);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_BCMP:
6304 case BUILT_IN_MEMCMP:
6305 target = expand_builtin_memcmp (exp, target);
6306 if (target)
6307 return target;
6308 break;
6309
6310 case BUILT_IN_SETJMP:
6311 /* This should have been lowered to the builtins below. */
6312 gcc_unreachable ();
6313
6314 case BUILT_IN_SETJMP_SETUP:
6315 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6316 and the receiver label. */
6317 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6318 {
6319 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6320 VOIDmode, EXPAND_NORMAL);
6321 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6322 rtx_insn *label_r = label_rtx (label);
6323
6324 /* This is copied from the handling of non-local gotos. */
6325 expand_builtin_setjmp_setup (buf_addr, label_r);
6326 nonlocal_goto_handler_labels
6327 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6328 nonlocal_goto_handler_labels);
6329 /* ??? Do not let expand_label treat us as such since we would
6330 not want to be both on the list of non-local labels and on
6331 the list of forced labels. */
6332 FORCED_LABEL (label) = 0;
6333 return const0_rtx;
6334 }
6335 break;
6336
6337 case BUILT_IN_SETJMP_RECEIVER:
6338 /* __builtin_setjmp_receiver is passed the receiver label. */
6339 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6340 {
6341 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6342 rtx_insn *label_r = label_rtx (label);
6343
6344 expand_builtin_setjmp_receiver (label_r);
6345 return const0_rtx;
6346 }
6347 break;
6348
6349 /* __builtin_longjmp is passed a pointer to an array of five words.
6350 It's similar to the C library longjmp function but works with
6351 __builtin_setjmp above. */
6352 case BUILT_IN_LONGJMP:
6353 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6354 {
6355 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6356 VOIDmode, EXPAND_NORMAL);
6357 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6358
6359 if (value != const1_rtx)
6360 {
6361 error ("%<__builtin_longjmp%> second argument must be 1");
6362 return const0_rtx;
6363 }
6364
6365 expand_builtin_longjmp (buf_addr, value);
6366 return const0_rtx;
6367 }
6368 break;
6369
6370 case BUILT_IN_NONLOCAL_GOTO:
6371 target = expand_builtin_nonlocal_goto (exp);
6372 if (target)
6373 return target;
6374 break;
6375
6376 /* This updates the setjmp buffer that is its argument with the value
6377 of the current stack pointer. */
6378 case BUILT_IN_UPDATE_SETJMP_BUF:
6379 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6380 {
6381 rtx buf_addr
6382 = expand_normal (CALL_EXPR_ARG (exp, 0));
6383
6384 expand_builtin_update_setjmp_buf (buf_addr);
6385 return const0_rtx;
6386 }
6387 break;
6388
6389 case BUILT_IN_TRAP:
6390 expand_builtin_trap ();
6391 return const0_rtx;
6392
6393 case BUILT_IN_UNREACHABLE:
6394 expand_builtin_unreachable ();
6395 return const0_rtx;
6396
6397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6398 case BUILT_IN_SIGNBITD32:
6399 case BUILT_IN_SIGNBITD64:
6400 case BUILT_IN_SIGNBITD128:
6401 target = expand_builtin_signbit (exp, target);
6402 if (target)
6403 return target;
6404 break;
6405
6406 /* Various hooks for the DWARF 2 __throw routine. */
6407 case BUILT_IN_UNWIND_INIT:
6408 expand_builtin_unwind_init ();
6409 return const0_rtx;
6410 case BUILT_IN_DWARF_CFA:
6411 return virtual_cfa_rtx;
6412 #ifdef DWARF2_UNWIND_INFO
6413 case BUILT_IN_DWARF_SP_COLUMN:
6414 return expand_builtin_dwarf_sp_column ();
6415 case BUILT_IN_INIT_DWARF_REG_SIZES:
6416 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6417 return const0_rtx;
6418 #endif
6419 case BUILT_IN_FROB_RETURN_ADDR:
6420 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6421 case BUILT_IN_EXTRACT_RETURN_ADDR:
6422 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6423 case BUILT_IN_EH_RETURN:
6424 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6425 CALL_EXPR_ARG (exp, 1));
6426 return const0_rtx;
6427 case BUILT_IN_EH_RETURN_DATA_REGNO:
6428 return expand_builtin_eh_return_data_regno (exp);
6429 case BUILT_IN_EXTEND_POINTER:
6430 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6431 case BUILT_IN_EH_POINTER:
6432 return expand_builtin_eh_pointer (exp);
6433 case BUILT_IN_EH_FILTER:
6434 return expand_builtin_eh_filter (exp);
6435 case BUILT_IN_EH_COPY_VALUES:
6436 return expand_builtin_eh_copy_values (exp);
6437
6438 case BUILT_IN_VA_START:
6439 return expand_builtin_va_start (exp);
6440 case BUILT_IN_VA_END:
6441 return expand_builtin_va_end (exp);
6442 case BUILT_IN_VA_COPY:
6443 return expand_builtin_va_copy (exp);
6444 case BUILT_IN_EXPECT:
6445 return expand_builtin_expect (exp, target);
6446 case BUILT_IN_ASSUME_ALIGNED:
6447 return expand_builtin_assume_aligned (exp, target);
6448 case BUILT_IN_PREFETCH:
6449 expand_builtin_prefetch (exp);
6450 return const0_rtx;
6451
6452 case BUILT_IN_INIT_TRAMPOLINE:
6453 return expand_builtin_init_trampoline (exp, true);
6454 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6455 return expand_builtin_init_trampoline (exp, false);
6456 case BUILT_IN_ADJUST_TRAMPOLINE:
6457 return expand_builtin_adjust_trampoline (exp);
6458
6459 case BUILT_IN_FORK:
6460 case BUILT_IN_EXECL:
6461 case BUILT_IN_EXECV:
6462 case BUILT_IN_EXECLP:
6463 case BUILT_IN_EXECLE:
6464 case BUILT_IN_EXECVP:
6465 case BUILT_IN_EXECVE:
6466 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6474 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6475 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6477 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6485 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6486 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6488 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6489 if (target)
6490 return target;
6491 break;
6492
6493 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6496 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6497 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6499 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6507 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6508 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6510 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6518 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6519 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6521 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6529 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6532 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6540 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6541 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6543 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6551 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6552 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6562 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6563 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6573 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6574 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6584 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6585 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6595 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6596 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6608 if (mode == VOIDmode)
6609 mode = TYPE_MODE (boolean_type_node);
6610 if (!target || !register_operand (target, mode))
6611 target = gen_reg_rtx (mode);
6612
6613 mode = get_builtin_sync_mode
6614 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6615 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6625 mode = get_builtin_sync_mode
6626 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6627 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6628 if (target)
6629 return target;
6630 break;
6631
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6638 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6639 if (target)
6640 return target;
6641 break;
6642
6643 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6646 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6647 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6649 expand_builtin_sync_lock_release (mode, exp);
6650 return const0_rtx;
6651
6652 case BUILT_IN_SYNC_SYNCHRONIZE:
6653 expand_builtin_sync_synchronize ();
6654 return const0_rtx;
6655
6656 case BUILT_IN_ATOMIC_EXCHANGE_1:
6657 case BUILT_IN_ATOMIC_EXCHANGE_2:
6658 case BUILT_IN_ATOMIC_EXCHANGE_4:
6659 case BUILT_IN_ATOMIC_EXCHANGE_8:
6660 case BUILT_IN_ATOMIC_EXCHANGE_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6662 target = expand_builtin_atomic_exchange (mode, exp, target);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6672 {
6673 unsigned int nargs, z;
6674 vec<tree, va_gc> *vec;
6675
6676 mode =
6677 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6678 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6679 if (target)
6680 return target;
6681
6682 /* If this is turned into an external library call, the weak parameter
6683 must be dropped to match the expected parameter list. */
6684 nargs = call_expr_nargs (exp);
6685 vec_alloc (vec, nargs - 1);
6686 for (z = 0; z < 3; z++)
6687 vec->quick_push (CALL_EXPR_ARG (exp, z));
6688 /* Skip the boolean weak parameter. */
6689 for (z = 4; z < 6; z++)
6690 vec->quick_push (CALL_EXPR_ARG (exp, z));
6691 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6692 break;
6693 }
6694
6695 case BUILT_IN_ATOMIC_LOAD_1:
6696 case BUILT_IN_ATOMIC_LOAD_2:
6697 case BUILT_IN_ATOMIC_LOAD_4:
6698 case BUILT_IN_ATOMIC_LOAD_8:
6699 case BUILT_IN_ATOMIC_LOAD_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6701 target = expand_builtin_atomic_load (mode, exp, target);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_ATOMIC_STORE_1:
6707 case BUILT_IN_ATOMIC_STORE_2:
6708 case BUILT_IN_ATOMIC_STORE_4:
6709 case BUILT_IN_ATOMIC_STORE_8:
6710 case BUILT_IN_ATOMIC_STORE_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6712 target = expand_builtin_atomic_store (mode, exp);
6713 if (target)
6714 return const0_rtx;
6715 break;
6716
6717 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6718 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6720 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6721 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6726 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6736 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6737 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6742 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_AND_FETCH_1:
6750 case BUILT_IN_ATOMIC_AND_FETCH_2:
6751 case BUILT_IN_ATOMIC_AND_FETCH_4:
6752 case BUILT_IN_ATOMIC_AND_FETCH_8:
6753 case BUILT_IN_ATOMIC_AND_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6758 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6768 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6769 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6774 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6784 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6785 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6786 {
6787 enum built_in_function lib;
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6789 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6790 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6792 ignore, lib);
6793 if (target)
6794 return target;
6795 break;
6796 }
6797 case BUILT_IN_ATOMIC_OR_FETCH_1:
6798 case BUILT_IN_ATOMIC_OR_FETCH_2:
6799 case BUILT_IN_ATOMIC_OR_FETCH_4:
6800 case BUILT_IN_ATOMIC_OR_FETCH_8:
6801 case BUILT_IN_ATOMIC_OR_FETCH_16:
6802 {
6803 enum built_in_function lib;
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6805 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6806 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6808 ignore, lib);
6809 if (target)
6810 return target;
6811 break;
6812 }
6813 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6816 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6817 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6828 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6829 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6832 ignore, BUILT_IN_NONE);
6833 if (target)
6834 return target;
6835 break;
6836
6837 case BUILT_IN_ATOMIC_FETCH_AND_1:
6838 case BUILT_IN_ATOMIC_FETCH_AND_2:
6839 case BUILT_IN_ATOMIC_FETCH_AND_4:
6840 case BUILT_IN_ATOMIC_FETCH_AND_8:
6841 case BUILT_IN_ATOMIC_FETCH_AND_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6844 ignore, BUILT_IN_NONE);
6845 if (target)
6846 return target;
6847 break;
6848
6849 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6852 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6853 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6856 ignore, BUILT_IN_NONE);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6864 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6865 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6867 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6868 ignore, BUILT_IN_NONE);
6869 if (target)
6870 return target;
6871 break;
6872
6873 case BUILT_IN_ATOMIC_FETCH_OR_1:
6874 case BUILT_IN_ATOMIC_FETCH_OR_2:
6875 case BUILT_IN_ATOMIC_FETCH_OR_4:
6876 case BUILT_IN_ATOMIC_FETCH_OR_8:
6877 case BUILT_IN_ATOMIC_FETCH_OR_16:
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6879 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6880 ignore, BUILT_IN_NONE);
6881 if (target)
6882 return target;
6883 break;
6884
6885 case BUILT_IN_ATOMIC_TEST_AND_SET:
6886 return expand_builtin_atomic_test_and_set (exp, target);
6887
6888 case BUILT_IN_ATOMIC_CLEAR:
6889 return expand_builtin_atomic_clear (exp);
6890
6891 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6892 return expand_builtin_atomic_always_lock_free (exp);
6893
6894 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6895 target = expand_builtin_atomic_is_lock_free (exp);
6896 if (target)
6897 return target;
6898 break;
6899
6900 case BUILT_IN_ATOMIC_THREAD_FENCE:
6901 expand_builtin_atomic_thread_fence (exp);
6902 return const0_rtx;
6903
6904 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6905 expand_builtin_atomic_signal_fence (exp);
6906 return const0_rtx;
6907
6908 case BUILT_IN_OBJECT_SIZE:
6909 return expand_builtin_object_size (exp);
6910
6911 case BUILT_IN_MEMCPY_CHK:
6912 case BUILT_IN_MEMPCPY_CHK:
6913 case BUILT_IN_MEMMOVE_CHK:
6914 case BUILT_IN_MEMSET_CHK:
6915 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6916 if (target)
6917 return target;
6918 break;
6919
6920 case BUILT_IN_STRCPY_CHK:
6921 case BUILT_IN_STPCPY_CHK:
6922 case BUILT_IN_STRNCPY_CHK:
6923 case BUILT_IN_STPNCPY_CHK:
6924 case BUILT_IN_STRCAT_CHK:
6925 case BUILT_IN_STRNCAT_CHK:
6926 case BUILT_IN_SNPRINTF_CHK:
6927 case BUILT_IN_VSNPRINTF_CHK:
6928 maybe_emit_chk_warning (exp, fcode);
6929 break;
6930
6931 case BUILT_IN_SPRINTF_CHK:
6932 case BUILT_IN_VSPRINTF_CHK:
6933 maybe_emit_sprintf_chk_warning (exp, fcode);
6934 break;
6935
6936 case BUILT_IN_FREE:
6937 if (warn_free_nonheap_object)
6938 maybe_emit_free_warning (exp);
6939 break;
6940
6941 case BUILT_IN_THREAD_POINTER:
6942 return expand_builtin_thread_pointer (exp, target);
6943
6944 case BUILT_IN_SET_THREAD_POINTER:
6945 expand_builtin_set_thread_pointer (exp);
6946 return const0_rtx;
6947
6948 case BUILT_IN_CILK_DETACH:
6949 expand_builtin_cilk_detach (exp);
6950 return const0_rtx;
6951
6952 case BUILT_IN_CILK_POP_FRAME:
6953 expand_builtin_cilk_pop_frame (exp);
6954 return const0_rtx;
6955
6956 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6957 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6958 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6959 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6960 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6961 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6962 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6963 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6964 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6965 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6966 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6967 /* We allow user CHKP builtins if Pointer Bounds
6968 Checker is off. */
6969 if (!chkp_function_instrumented_p (current_function_decl))
6970 {
6971 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6972 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6973 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6974 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6975 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6976 return expand_normal (CALL_EXPR_ARG (exp, 0));
6977 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6978 return expand_normal (size_zero_node);
6979 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6980 return expand_normal (size_int (-1));
6981 else
6982 return const0_rtx;
6983 }
6984 /* FALLTHROUGH */
6985
6986 case BUILT_IN_CHKP_BNDMK:
6987 case BUILT_IN_CHKP_BNDSTX:
6988 case BUILT_IN_CHKP_BNDCL:
6989 case BUILT_IN_CHKP_BNDCU:
6990 case BUILT_IN_CHKP_BNDLDX:
6991 case BUILT_IN_CHKP_BNDRET:
6992 case BUILT_IN_CHKP_INTERSECT:
6993 case BUILT_IN_CHKP_NARROW:
6994 case BUILT_IN_CHKP_EXTRACT_LOWER:
6995 case BUILT_IN_CHKP_EXTRACT_UPPER:
6996 /* Software implementation of Pointer Bounds Checker is NYI.
6997 Target support is required. */
6998 error ("Your target platform does not support -fcheck-pointer-bounds");
6999 break;
7000
7001 case BUILT_IN_ACC_ON_DEVICE:
7002 /* Do library call, if we failed to expand the builtin when
7003 folding. */
7004 break;
7005
7006 default: /* just do library call, if unknown builtin */
7007 break;
7008 }
7009
7010 /* The switch statement above can drop through to cause the function
7011 to be called normally. */
7012 return expand_call (exp, target, ignore);
7013 }
7014
7015 /* Similar to expand_builtin but is used for instrumented calls. */
7016
7017 rtx
7018 expand_builtin_with_bounds (tree exp, rtx target,
7019 rtx subtarget ATTRIBUTE_UNUSED,
7020 machine_mode mode, int ignore)
7021 {
7022 tree fndecl = get_callee_fndecl (exp);
7023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7024
7025 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7026
7027 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7028 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7029
7030 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7031 && fcode < END_CHKP_BUILTINS);
7032
7033 switch (fcode)
7034 {
7035 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7036 target = expand_builtin_memcpy_with_bounds (exp, target);
7037 if (target)
7038 return target;
7039 break;
7040
7041 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7042 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7043 if (target)
7044 return target;
7045 break;
7046
7047 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7048 target = expand_builtin_memset_with_bounds (exp, target, mode);
7049 if (target)
7050 return target;
7051 break;
7052
7053 default:
7054 break;
7055 }
7056
7057 /* The switch statement above can drop through to cause the function
7058 to be called normally. */
7059 return expand_call (exp, target, ignore);
7060 }
7061
7062 /* Determine whether a tree node represents a call to a built-in
7063 function. If the tree T is a call to a built-in function with
7064 the right number of arguments of the appropriate types, return
7065 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7066 Otherwise the return value is END_BUILTINS. */
7067
7068 enum built_in_function
7069 builtin_mathfn_code (const_tree t)
7070 {
7071 const_tree fndecl, arg, parmlist;
7072 const_tree argtype, parmtype;
7073 const_call_expr_arg_iterator iter;
7074
7075 if (TREE_CODE (t) != CALL_EXPR
7076 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7077 return END_BUILTINS;
7078
7079 fndecl = get_callee_fndecl (t);
7080 if (fndecl == NULL_TREE
7081 || TREE_CODE (fndecl) != FUNCTION_DECL
7082 || ! DECL_BUILT_IN (fndecl)
7083 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7084 return END_BUILTINS;
7085
7086 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7087 init_const_call_expr_arg_iterator (t, &iter);
7088 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7089 {
7090 /* If a function doesn't take a variable number of arguments,
7091 the last element in the list will have type `void'. */
7092 parmtype = TREE_VALUE (parmlist);
7093 if (VOID_TYPE_P (parmtype))
7094 {
7095 if (more_const_call_expr_args_p (&iter))
7096 return END_BUILTINS;
7097 return DECL_FUNCTION_CODE (fndecl);
7098 }
7099
7100 if (! more_const_call_expr_args_p (&iter))
7101 return END_BUILTINS;
7102
7103 arg = next_const_call_expr_arg (&iter);
7104 argtype = TREE_TYPE (arg);
7105
7106 if (SCALAR_FLOAT_TYPE_P (parmtype))
7107 {
7108 if (! SCALAR_FLOAT_TYPE_P (argtype))
7109 return END_BUILTINS;
7110 }
7111 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7112 {
7113 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7114 return END_BUILTINS;
7115 }
7116 else if (POINTER_TYPE_P (parmtype))
7117 {
7118 if (! POINTER_TYPE_P (argtype))
7119 return END_BUILTINS;
7120 }
7121 else if (INTEGRAL_TYPE_P (parmtype))
7122 {
7123 if (! INTEGRAL_TYPE_P (argtype))
7124 return END_BUILTINS;
7125 }
7126 else
7127 return END_BUILTINS;
7128 }
7129
7130 /* Variable-length argument list. */
7131 return DECL_FUNCTION_CODE (fndecl);
7132 }
7133
7134 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7135 evaluate to a constant. */
7136
7137 static tree
7138 fold_builtin_constant_p (tree arg)
7139 {
7140 /* We return 1 for a numeric type that's known to be a constant
7141 value at compile-time or for an aggregate type that's a
7142 literal constant. */
7143 STRIP_NOPS (arg);
7144
7145 /* If we know this is a constant, emit the constant of one. */
7146 if (CONSTANT_CLASS_P (arg)
7147 || (TREE_CODE (arg) == CONSTRUCTOR
7148 && TREE_CONSTANT (arg)))
7149 return integer_one_node;
7150 if (TREE_CODE (arg) == ADDR_EXPR)
7151 {
7152 tree op = TREE_OPERAND (arg, 0);
7153 if (TREE_CODE (op) == STRING_CST
7154 || (TREE_CODE (op) == ARRAY_REF
7155 && integer_zerop (TREE_OPERAND (op, 1))
7156 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7157 return integer_one_node;
7158 }
7159
7160 /* If this expression has side effects, show we don't know it to be a
7161 constant. Likewise if it's a pointer or aggregate type since in
7162 those case we only want literals, since those are only optimized
7163 when generating RTL, not later.
7164 And finally, if we are compiling an initializer, not code, we
7165 need to return a definite result now; there's not going to be any
7166 more optimization done. */
7167 if (TREE_SIDE_EFFECTS (arg)
7168 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7169 || POINTER_TYPE_P (TREE_TYPE (arg))
7170 || cfun == 0
7171 || folding_initializer
7172 || force_folding_builtin_constant_p)
7173 return integer_zero_node;
7174
7175 return NULL_TREE;
7176 }
7177
7178 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7179 return it as a truthvalue. */
7180
7181 static tree
7182 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7183 tree predictor)
7184 {
7185 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7186
7187 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7188 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7189 ret_type = TREE_TYPE (TREE_TYPE (fn));
7190 pred_type = TREE_VALUE (arg_types);
7191 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7192
7193 pred = fold_convert_loc (loc, pred_type, pred);
7194 expected = fold_convert_loc (loc, expected_type, expected);
7195 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7196 predictor);
7197
7198 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7199 build_int_cst (ret_type, 0));
7200 }
7201
7202 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7203 NULL_TREE if no simplification is possible. */
7204
7205 tree
7206 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7207 {
7208 tree inner, fndecl, inner_arg0;
7209 enum tree_code code;
7210
7211 /* Distribute the expected value over short-circuiting operators.
7212 See through the cast from truthvalue_type_node to long. */
7213 inner_arg0 = arg0;
7214 while (CONVERT_EXPR_P (inner_arg0)
7215 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7216 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7217 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7218
7219 /* If this is a builtin_expect within a builtin_expect keep the
7220 inner one. See through a comparison against a constant. It
7221 might have been added to create a thruthvalue. */
7222 inner = inner_arg0;
7223
7224 if (COMPARISON_CLASS_P (inner)
7225 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7226 inner = TREE_OPERAND (inner, 0);
7227
7228 if (TREE_CODE (inner) == CALL_EXPR
7229 && (fndecl = get_callee_fndecl (inner))
7230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7232 return arg0;
7233
7234 inner = inner_arg0;
7235 code = TREE_CODE (inner);
7236 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7237 {
7238 tree op0 = TREE_OPERAND (inner, 0);
7239 tree op1 = TREE_OPERAND (inner, 1);
7240
7241 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7242 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7243 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7244
7245 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7246 }
7247
7248 /* If the argument isn't invariant then there's nothing else we can do. */
7249 if (!TREE_CONSTANT (inner_arg0))
7250 return NULL_TREE;
7251
7252 /* If we expect that a comparison against the argument will fold to
7253 a constant return the constant. In practice, this means a true
7254 constant or the address of a non-weak symbol. */
7255 inner = inner_arg0;
7256 STRIP_NOPS (inner);
7257 if (TREE_CODE (inner) == ADDR_EXPR)
7258 {
7259 do
7260 {
7261 inner = TREE_OPERAND (inner, 0);
7262 }
7263 while (TREE_CODE (inner) == COMPONENT_REF
7264 || TREE_CODE (inner) == ARRAY_REF);
7265 if ((TREE_CODE (inner) == VAR_DECL
7266 || TREE_CODE (inner) == FUNCTION_DECL)
7267 && DECL_WEAK (inner))
7268 return NULL_TREE;
7269 }
7270
7271 /* Otherwise, ARG0 already has the proper type for the return value. */
7272 return arg0;
7273 }
7274
7275 /* Fold a call to __builtin_classify_type with argument ARG. */
7276
7277 static tree
7278 fold_builtin_classify_type (tree arg)
7279 {
7280 if (arg == 0)
7281 return build_int_cst (integer_type_node, no_type_class);
7282
7283 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7284 }
7285
7286 /* Fold a call to __builtin_strlen with argument ARG. */
7287
7288 static tree
7289 fold_builtin_strlen (location_t loc, tree type, tree arg)
7290 {
7291 if (!validate_arg (arg, POINTER_TYPE))
7292 return NULL_TREE;
7293 else
7294 {
7295 tree len = c_strlen (arg, 0);
7296
7297 if (len)
7298 return fold_convert_loc (loc, type, len);
7299
7300 return NULL_TREE;
7301 }
7302 }
7303
7304 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7305
7306 static tree
7307 fold_builtin_inf (location_t loc, tree type, int warn)
7308 {
7309 REAL_VALUE_TYPE real;
7310
7311 /* __builtin_inff is intended to be usable to define INFINITY on all
7312 targets. If an infinity is not available, INFINITY expands "to a
7313 positive constant of type float that overflows at translation
7314 time", footnote "In this case, using INFINITY will violate the
7315 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7316 Thus we pedwarn to ensure this constraint violation is
7317 diagnosed. */
7318 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7319 pedwarn (loc, 0, "target format does not support infinity");
7320
7321 real_inf (&real);
7322 return build_real (type, real);
7323 }
7324
7325 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7326
7327 static tree
7328 fold_builtin_nan (tree arg, tree type, int quiet)
7329 {
7330 REAL_VALUE_TYPE real;
7331 const char *str;
7332
7333 if (!validate_arg (arg, POINTER_TYPE))
7334 return NULL_TREE;
7335 str = c_getstr (arg);
7336 if (!str)
7337 return NULL_TREE;
7338
7339 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7340 return NULL_TREE;
7341
7342 return build_real (type, real);
7343 }
7344
7345 /* Return true if the floating point expression T has an integer value.
7346 We also allow +Inf, -Inf and NaN to be considered integer values. */
7347
7348 static bool
7349 integer_valued_real_p (tree t)
7350 {
7351 switch (TREE_CODE (t))
7352 {
7353 case FLOAT_EXPR:
7354 return true;
7355
7356 case ABS_EXPR:
7357 case SAVE_EXPR:
7358 return integer_valued_real_p (TREE_OPERAND (t, 0));
7359
7360 case COMPOUND_EXPR:
7361 case MODIFY_EXPR:
7362 case BIND_EXPR:
7363 return integer_valued_real_p (TREE_OPERAND (t, 1));
7364
7365 case PLUS_EXPR:
7366 case MINUS_EXPR:
7367 case MULT_EXPR:
7368 case MIN_EXPR:
7369 case MAX_EXPR:
7370 return integer_valued_real_p (TREE_OPERAND (t, 0))
7371 && integer_valued_real_p (TREE_OPERAND (t, 1));
7372
7373 case COND_EXPR:
7374 return integer_valued_real_p (TREE_OPERAND (t, 1))
7375 && integer_valued_real_p (TREE_OPERAND (t, 2));
7376
7377 case REAL_CST:
7378 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7379
7380 CASE_CONVERT:
7381 {
7382 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7383 if (TREE_CODE (type) == INTEGER_TYPE)
7384 return true;
7385 if (TREE_CODE (type) == REAL_TYPE)
7386 return integer_valued_real_p (TREE_OPERAND (t, 0));
7387 break;
7388 }
7389
7390 case CALL_EXPR:
7391 switch (builtin_mathfn_code (t))
7392 {
7393 CASE_FLT_FN (BUILT_IN_CEIL):
7394 CASE_FLT_FN (BUILT_IN_FLOOR):
7395 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7396 CASE_FLT_FN (BUILT_IN_RINT):
7397 CASE_FLT_FN (BUILT_IN_ROUND):
7398 CASE_FLT_FN (BUILT_IN_TRUNC):
7399 return true;
7400
7401 CASE_FLT_FN (BUILT_IN_FMIN):
7402 CASE_FLT_FN (BUILT_IN_FMAX):
7403 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7404 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7405
7406 default:
7407 break;
7408 }
7409 break;
7410
7411 default:
7412 break;
7413 }
7414 return false;
7415 }
7416
7417 /* FNDECL is assumed to be a builtin where truncation can be propagated
7418 across (for instance floor((double)f) == (double)floorf (f).
7419 Do the transformation for a call with argument ARG. */
7420
7421 static tree
7422 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7423 {
7424 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7425
7426 if (!validate_arg (arg, REAL_TYPE))
7427 return NULL_TREE;
7428
7429 /* Integer rounding functions are idempotent. */
7430 if (fcode == builtin_mathfn_code (arg))
7431 return arg;
7432
7433 /* If argument is already integer valued, and we don't need to worry
7434 about setting errno, there's no need to perform rounding. */
7435 if (! flag_errno_math && integer_valued_real_p (arg))
7436 return arg;
7437
7438 if (optimize)
7439 {
7440 tree arg0 = strip_float_extensions (arg);
7441 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7442 tree newtype = TREE_TYPE (arg0);
7443 tree decl;
7444
7445 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7446 && (decl = mathfn_built_in (newtype, fcode)))
7447 return fold_convert_loc (loc, ftype,
7448 build_call_expr_loc (loc, decl, 1,
7449 fold_convert_loc (loc,
7450 newtype,
7451 arg0)));
7452 }
7453 return NULL_TREE;
7454 }
7455
7456 /* FNDECL is assumed to be builtin which can narrow the FP type of
7457 the argument, for instance lround((double)f) -> lroundf (f).
7458 Do the transformation for a call with argument ARG. */
7459
7460 static tree
7461 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7462 {
7463 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7464
7465 if (!validate_arg (arg, REAL_TYPE))
7466 return NULL_TREE;
7467
7468 /* If argument is already integer valued, and we don't need to worry
7469 about setting errno, there's no need to perform rounding. */
7470 if (! flag_errno_math && integer_valued_real_p (arg))
7471 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7472 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7473
7474 if (optimize)
7475 {
7476 tree ftype = TREE_TYPE (arg);
7477 tree arg0 = strip_float_extensions (arg);
7478 tree newtype = TREE_TYPE (arg0);
7479 tree decl;
7480
7481 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7482 && (decl = mathfn_built_in (newtype, fcode)))
7483 return build_call_expr_loc (loc, decl, 1,
7484 fold_convert_loc (loc, newtype, arg0));
7485 }
7486
7487 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7488 sizeof (int) == sizeof (long). */
7489 if (TYPE_PRECISION (integer_type_node)
7490 == TYPE_PRECISION (long_integer_type_node))
7491 {
7492 tree newfn = NULL_TREE;
7493 switch (fcode)
7494 {
7495 CASE_FLT_FN (BUILT_IN_ICEIL):
7496 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7497 break;
7498
7499 CASE_FLT_FN (BUILT_IN_IFLOOR):
7500 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7501 break;
7502
7503 CASE_FLT_FN (BUILT_IN_IROUND):
7504 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7505 break;
7506
7507 CASE_FLT_FN (BUILT_IN_IRINT):
7508 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7509 break;
7510
7511 default:
7512 break;
7513 }
7514
7515 if (newfn)
7516 {
7517 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7518 return fold_convert_loc (loc,
7519 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7520 }
7521 }
7522
7523 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7524 sizeof (long long) == sizeof (long). */
7525 if (TYPE_PRECISION (long_long_integer_type_node)
7526 == TYPE_PRECISION (long_integer_type_node))
7527 {
7528 tree newfn = NULL_TREE;
7529 switch (fcode)
7530 {
7531 CASE_FLT_FN (BUILT_IN_LLCEIL):
7532 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7533 break;
7534
7535 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7536 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7537 break;
7538
7539 CASE_FLT_FN (BUILT_IN_LLROUND):
7540 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7541 break;
7542
7543 CASE_FLT_FN (BUILT_IN_LLRINT):
7544 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7545 break;
7546
7547 default:
7548 break;
7549 }
7550
7551 if (newfn)
7552 {
7553 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7554 return fold_convert_loc (loc,
7555 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7556 }
7557 }
7558
7559 return NULL_TREE;
7560 }
7561
7562 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7563 return type. Return NULL_TREE if no simplification can be made. */
7564
7565 static tree
7566 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7567 {
7568 tree res;
7569
7570 if (!validate_arg (arg, COMPLEX_TYPE)
7571 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7572 return NULL_TREE;
7573
7574 /* Calculate the result when the argument is a constant. */
7575 if (TREE_CODE (arg) == COMPLEX_CST
7576 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7577 type, mpfr_hypot)))
7578 return res;
7579
7580 if (TREE_CODE (arg) == COMPLEX_EXPR)
7581 {
7582 tree real = TREE_OPERAND (arg, 0);
7583 tree imag = TREE_OPERAND (arg, 1);
7584
7585 /* If either part is zero, cabs is fabs of the other. */
7586 if (real_zerop (real))
7587 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7588 if (real_zerop (imag))
7589 return fold_build1_loc (loc, ABS_EXPR, type, real);
7590
7591 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7592 if (flag_unsafe_math_optimizations
7593 && operand_equal_p (real, imag, OEP_PURE_SAME))
7594 {
7595 STRIP_NOPS (real);
7596 return fold_build2_loc (loc, MULT_EXPR, type,
7597 fold_build1_loc (loc, ABS_EXPR, type, real),
7598 build_real_truncate (type, dconst_sqrt2 ()));
7599 }
7600 }
7601
7602 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7603 if (TREE_CODE (arg) == NEGATE_EXPR
7604 || TREE_CODE (arg) == CONJ_EXPR)
7605 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7606
7607 /* Don't do this when optimizing for size. */
7608 if (flag_unsafe_math_optimizations
7609 && optimize && optimize_function_for_speed_p (cfun))
7610 {
7611 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7612
7613 if (sqrtfn != NULL_TREE)
7614 {
7615 tree rpart, ipart, result;
7616
7617 arg = builtin_save_expr (arg);
7618
7619 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7620 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7621
7622 rpart = builtin_save_expr (rpart);
7623 ipart = builtin_save_expr (ipart);
7624
7625 result = fold_build2_loc (loc, PLUS_EXPR, type,
7626 fold_build2_loc (loc, MULT_EXPR, type,
7627 rpart, rpart),
7628 fold_build2_loc (loc, MULT_EXPR, type,
7629 ipart, ipart));
7630
7631 return build_call_expr_loc (loc, sqrtfn, 1, result);
7632 }
7633 }
7634
7635 return NULL_TREE;
7636 }
7637
7638 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7639 complex tree type of the result. If NEG is true, the imaginary
7640 zero is negative. */
7641
7642 static tree
7643 build_complex_cproj (tree type, bool neg)
7644 {
7645 REAL_VALUE_TYPE rinf, rzero = dconst0;
7646
7647 real_inf (&rinf);
7648 rzero.sign = neg;
7649 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7650 build_real (TREE_TYPE (type), rzero));
7651 }
7652
7653 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7654 return type. Return NULL_TREE if no simplification can be made. */
7655
7656 static tree
7657 fold_builtin_cproj (location_t loc, tree arg, tree type)
7658 {
7659 if (!validate_arg (arg, COMPLEX_TYPE)
7660 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7661 return NULL_TREE;
7662
7663 /* If there are no infinities, return arg. */
7664 if (! HONOR_INFINITIES (type))
7665 return non_lvalue_loc (loc, arg);
7666
7667 /* Calculate the result when the argument is a constant. */
7668 if (TREE_CODE (arg) == COMPLEX_CST)
7669 {
7670 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7671 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7672
7673 if (real_isinf (real) || real_isinf (imag))
7674 return build_complex_cproj (type, imag->sign);
7675 else
7676 return arg;
7677 }
7678 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7679 {
7680 tree real = TREE_OPERAND (arg, 0);
7681 tree imag = TREE_OPERAND (arg, 1);
7682
7683 STRIP_NOPS (real);
7684 STRIP_NOPS (imag);
7685
7686 /* If the real part is inf and the imag part is known to be
7687 nonnegative, return (inf + 0i). Remember side-effects are
7688 possible in the imag part. */
7689 if (TREE_CODE (real) == REAL_CST
7690 && real_isinf (TREE_REAL_CST_PTR (real))
7691 && tree_expr_nonnegative_p (imag))
7692 return omit_one_operand_loc (loc, type,
7693 build_complex_cproj (type, false),
7694 arg);
7695
7696 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7697 Remember side-effects are possible in the real part. */
7698 if (TREE_CODE (imag) == REAL_CST
7699 && real_isinf (TREE_REAL_CST_PTR (imag)))
7700 return
7701 omit_one_operand_loc (loc, type,
7702 build_complex_cproj (type, TREE_REAL_CST_PTR
7703 (imag)->sign), arg);
7704 }
7705
7706 return NULL_TREE;
7707 }
7708
7709 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7710 Return NULL_TREE if no simplification can be made. */
7711
7712 static tree
7713 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7714 {
7715
7716 enum built_in_function fcode;
7717 tree res;
7718
7719 if (!validate_arg (arg, REAL_TYPE))
7720 return NULL_TREE;
7721
7722 /* Calculate the result when the argument is a constant. */
7723 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7724 return res;
7725
7726 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7727 fcode = builtin_mathfn_code (arg);
7728 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7729 {
7730 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7731 arg = fold_build2_loc (loc, MULT_EXPR, type,
7732 CALL_EXPR_ARG (arg, 0),
7733 build_real (type, dconsthalf));
7734 return build_call_expr_loc (loc, expfn, 1, arg);
7735 }
7736
7737 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7738 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7739 {
7740 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7741
7742 if (powfn)
7743 {
7744 tree arg0 = CALL_EXPR_ARG (arg, 0);
7745 tree arg1 = (BUILTIN_SQRT_P (fcode)
7746 ? build_real (type, dconst_quarter ())
7747 : build_real_truncate (type, dconst_sixth ()));
7748 return build_call_expr_loc (loc, powfn, 2, arg0, arg1);
7749 }
7750 }
7751
7752 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7753 if (flag_unsafe_math_optimizations
7754 && (fcode == BUILT_IN_POW
7755 || fcode == BUILT_IN_POWF
7756 || fcode == BUILT_IN_POWL))
7757 {
7758 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7759 tree arg0 = CALL_EXPR_ARG (arg, 0);
7760 tree arg1 = CALL_EXPR_ARG (arg, 1);
7761 tree narg1;
7762 if (!tree_expr_nonnegative_p (arg0))
7763 arg0 = build1 (ABS_EXPR, type, arg0);
7764 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7765 build_real (type, dconsthalf));
7766 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7767 }
7768
7769 return NULL_TREE;
7770 }
7771
7772 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7773 Return NULL_TREE if no simplification can be made. */
7774
7775 static tree
7776 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7777 {
7778 const enum built_in_function fcode = builtin_mathfn_code (arg);
7779 tree res;
7780
7781 if (!validate_arg (arg, REAL_TYPE))
7782 return NULL_TREE;
7783
7784 /* Calculate the result when the argument is a constant. */
7785 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7786 return res;
7787
7788 if (flag_unsafe_math_optimizations)
7789 {
7790 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7791 if (BUILTIN_EXPONENT_P (fcode))
7792 {
7793 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7794 arg = fold_build2_loc (loc, MULT_EXPR, type,
7795 CALL_EXPR_ARG (arg, 0),
7796 build_real_truncate (type, dconst_third ()));
7797 return build_call_expr_loc (loc, expfn, 1, arg);
7798 }
7799
7800 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7801 if (BUILTIN_SQRT_P (fcode))
7802 {
7803 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7804
7805 if (powfn)
7806 {
7807 tree arg0 = CALL_EXPR_ARG (arg, 0);
7808 tree tree_root = build_real_truncate (type, dconst_sixth ());
7809 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7810 }
7811 }
7812
7813 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7814 if (BUILTIN_CBRT_P (fcode))
7815 {
7816 tree arg0 = CALL_EXPR_ARG (arg, 0);
7817 if (tree_expr_nonnegative_p (arg0))
7818 {
7819 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7820
7821 if (powfn)
7822 {
7823 tree tree_root = build_real_truncate (type, dconst_ninth ());
7824 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7825 }
7826 }
7827 }
7828
7829 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7830 if (fcode == BUILT_IN_POW
7831 || fcode == BUILT_IN_POWF
7832 || fcode == BUILT_IN_POWL)
7833 {
7834 tree arg00 = CALL_EXPR_ARG (arg, 0);
7835 tree arg01 = CALL_EXPR_ARG (arg, 1);
7836 if (tree_expr_nonnegative_p (arg00))
7837 {
7838 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7839 tree c = build_real_truncate (type, dconst_third ());
7840 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01, c);
7841 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7842 }
7843 }
7844 }
7845 return NULL_TREE;
7846 }
7847
7848 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7849 TYPE is the type of the return value. Return NULL_TREE if no
7850 simplification can be made. */
7851
7852 static tree
7853 fold_builtin_cos (location_t loc,
7854 tree arg, tree type, tree fndecl)
7855 {
7856 tree res, narg;
7857
7858 if (!validate_arg (arg, REAL_TYPE))
7859 return NULL_TREE;
7860
7861 /* Calculate the result when the argument is a constant. */
7862 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7863 return res;
7864
7865 /* Optimize cos(-x) into cos (x). */
7866 if ((narg = fold_strip_sign_ops (arg)))
7867 return build_call_expr_loc (loc, fndecl, 1, narg);
7868
7869 return NULL_TREE;
7870 }
7871
7872 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7873 Return NULL_TREE if no simplification can be made. */
7874
7875 static tree
7876 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7877 {
7878 if (validate_arg (arg, REAL_TYPE))
7879 {
7880 tree res, narg;
7881
7882 /* Calculate the result when the argument is a constant. */
7883 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7884 return res;
7885
7886 /* Optimize cosh(-x) into cosh (x). */
7887 if ((narg = fold_strip_sign_ops (arg)))
7888 return build_call_expr_loc (loc, fndecl, 1, narg);
7889 }
7890
7891 return NULL_TREE;
7892 }
7893
7894 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7895 argument ARG. TYPE is the type of the return value. Return
7896 NULL_TREE if no simplification can be made. */
7897
7898 static tree
7899 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7900 bool hyper)
7901 {
7902 if (validate_arg (arg, COMPLEX_TYPE)
7903 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7904 {
7905 tree tmp;
7906
7907 /* Calculate the result when the argument is a constant. */
7908 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7909 return tmp;
7910
7911 /* Optimize fn(-x) into fn(x). */
7912 if ((tmp = fold_strip_sign_ops (arg)))
7913 return build_call_expr_loc (loc, fndecl, 1, tmp);
7914 }
7915
7916 return NULL_TREE;
7917 }
7918
7919 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7920 Return NULL_TREE if no simplification can be made. */
7921
7922 static tree
7923 fold_builtin_tan (tree arg, tree type)
7924 {
7925 enum built_in_function fcode;
7926 tree res;
7927
7928 if (!validate_arg (arg, REAL_TYPE))
7929 return NULL_TREE;
7930
7931 /* Calculate the result when the argument is a constant. */
7932 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7933 return res;
7934
7935 /* Optimize tan(atan(x)) = x. */
7936 fcode = builtin_mathfn_code (arg);
7937 if (flag_unsafe_math_optimizations
7938 && (fcode == BUILT_IN_ATAN
7939 || fcode == BUILT_IN_ATANF
7940 || fcode == BUILT_IN_ATANL))
7941 return CALL_EXPR_ARG (arg, 0);
7942
7943 return NULL_TREE;
7944 }
7945
7946 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7947 NULL_TREE if no simplification can be made. */
7948
7949 static tree
7950 fold_builtin_sincos (location_t loc,
7951 tree arg0, tree arg1, tree arg2)
7952 {
7953 tree type;
7954 tree res, fn, call;
7955
7956 if (!validate_arg (arg0, REAL_TYPE)
7957 || !validate_arg (arg1, POINTER_TYPE)
7958 || !validate_arg (arg2, POINTER_TYPE))
7959 return NULL_TREE;
7960
7961 type = TREE_TYPE (arg0);
7962
7963 /* Calculate the result when the argument is a constant. */
7964 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7965 return res;
7966
7967 /* Canonicalize sincos to cexpi. */
7968 if (!targetm.libc_has_function (function_c99_math_complex))
7969 return NULL_TREE;
7970 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7971 if (!fn)
7972 return NULL_TREE;
7973
7974 call = build_call_expr_loc (loc, fn, 1, arg0);
7975 call = builtin_save_expr (call);
7976
7977 return build2 (COMPOUND_EXPR, void_type_node,
7978 build2 (MODIFY_EXPR, void_type_node,
7979 build_fold_indirect_ref_loc (loc, arg1),
7980 build1 (IMAGPART_EXPR, type, call)),
7981 build2 (MODIFY_EXPR, void_type_node,
7982 build_fold_indirect_ref_loc (loc, arg2),
7983 build1 (REALPART_EXPR, type, call)));
7984 }
7985
7986 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7987 NULL_TREE if no simplification can be made. */
7988
7989 static tree
7990 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7991 {
7992 tree rtype;
7993 tree realp, imagp, ifn;
7994 tree res;
7995
7996 if (!validate_arg (arg0, COMPLEX_TYPE)
7997 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7998 return NULL_TREE;
7999
8000 /* Calculate the result when the argument is a constant. */
8001 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8002 return res;
8003
8004 rtype = TREE_TYPE (TREE_TYPE (arg0));
8005
8006 /* In case we can figure out the real part of arg0 and it is constant zero
8007 fold to cexpi. */
8008 if (!targetm.libc_has_function (function_c99_math_complex))
8009 return NULL_TREE;
8010 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8011 if (!ifn)
8012 return NULL_TREE;
8013
8014 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8015 && real_zerop (realp))
8016 {
8017 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8018 return build_call_expr_loc (loc, ifn, 1, narg);
8019 }
8020
8021 /* In case we can easily decompose real and imaginary parts split cexp
8022 to exp (r) * cexpi (i). */
8023 if (flag_unsafe_math_optimizations
8024 && realp)
8025 {
8026 tree rfn, rcall, icall;
8027
8028 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8029 if (!rfn)
8030 return NULL_TREE;
8031
8032 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8033 if (!imagp)
8034 return NULL_TREE;
8035
8036 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8037 icall = builtin_save_expr (icall);
8038 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8039 rcall = builtin_save_expr (rcall);
8040 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8041 fold_build2_loc (loc, MULT_EXPR, rtype,
8042 rcall,
8043 fold_build1_loc (loc, REALPART_EXPR,
8044 rtype, icall)),
8045 fold_build2_loc (loc, MULT_EXPR, rtype,
8046 rcall,
8047 fold_build1_loc (loc, IMAGPART_EXPR,
8048 rtype, icall)));
8049 }
8050
8051 return NULL_TREE;
8052 }
8053
8054 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8055 Return NULL_TREE if no simplification can be made. */
8056
8057 static tree
8058 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8059 {
8060 if (!validate_arg (arg, REAL_TYPE))
8061 return NULL_TREE;
8062
8063 /* Optimize trunc of constant value. */
8064 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8065 {
8066 REAL_VALUE_TYPE r, x;
8067 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8068
8069 x = TREE_REAL_CST (arg);
8070 real_trunc (&r, TYPE_MODE (type), &x);
8071 return build_real (type, r);
8072 }
8073
8074 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8075 }
8076
8077 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8078 Return NULL_TREE if no simplification can be made. */
8079
8080 static tree
8081 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8082 {
8083 if (!validate_arg (arg, REAL_TYPE))
8084 return NULL_TREE;
8085
8086 /* Optimize floor of constant value. */
8087 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8088 {
8089 REAL_VALUE_TYPE x;
8090
8091 x = TREE_REAL_CST (arg);
8092 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8093 {
8094 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8095 REAL_VALUE_TYPE r;
8096
8097 real_floor (&r, TYPE_MODE (type), &x);
8098 return build_real (type, r);
8099 }
8100 }
8101
8102 /* Fold floor (x) where x is nonnegative to trunc (x). */
8103 if (tree_expr_nonnegative_p (arg))
8104 {
8105 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8106 if (truncfn)
8107 return build_call_expr_loc (loc, truncfn, 1, arg);
8108 }
8109
8110 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8111 }
8112
8113 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8114 Return NULL_TREE if no simplification can be made. */
8115
8116 static tree
8117 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8118 {
8119 if (!validate_arg (arg, REAL_TYPE))
8120 return NULL_TREE;
8121
8122 /* Optimize ceil of constant value. */
8123 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8124 {
8125 REAL_VALUE_TYPE x;
8126
8127 x = TREE_REAL_CST (arg);
8128 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8129 {
8130 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8131 REAL_VALUE_TYPE r;
8132
8133 real_ceil (&r, TYPE_MODE (type), &x);
8134 return build_real (type, r);
8135 }
8136 }
8137
8138 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8139 }
8140
8141 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8142 Return NULL_TREE if no simplification can be made. */
8143
8144 static tree
8145 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8146 {
8147 if (!validate_arg (arg, REAL_TYPE))
8148 return NULL_TREE;
8149
8150 /* Optimize round of constant value. */
8151 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8152 {
8153 REAL_VALUE_TYPE x;
8154
8155 x = TREE_REAL_CST (arg);
8156 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8157 {
8158 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8159 REAL_VALUE_TYPE r;
8160
8161 real_round (&r, TYPE_MODE (type), &x);
8162 return build_real (type, r);
8163 }
8164 }
8165
8166 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8167 }
8168
8169 /* Fold function call to builtin lround, lroundf or lroundl (or the
8170 corresponding long long versions) and other rounding functions. ARG
8171 is the argument to the call. Return NULL_TREE if no simplification
8172 can be made. */
8173
8174 static tree
8175 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8176 {
8177 if (!validate_arg (arg, REAL_TYPE))
8178 return NULL_TREE;
8179
8180 /* Optimize lround of constant value. */
8181 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8182 {
8183 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8184
8185 if (real_isfinite (&x))
8186 {
8187 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8188 tree ftype = TREE_TYPE (arg);
8189 REAL_VALUE_TYPE r;
8190 bool fail = false;
8191
8192 switch (DECL_FUNCTION_CODE (fndecl))
8193 {
8194 CASE_FLT_FN (BUILT_IN_IFLOOR):
8195 CASE_FLT_FN (BUILT_IN_LFLOOR):
8196 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8197 real_floor (&r, TYPE_MODE (ftype), &x);
8198 break;
8199
8200 CASE_FLT_FN (BUILT_IN_ICEIL):
8201 CASE_FLT_FN (BUILT_IN_LCEIL):
8202 CASE_FLT_FN (BUILT_IN_LLCEIL):
8203 real_ceil (&r, TYPE_MODE (ftype), &x);
8204 break;
8205
8206 CASE_FLT_FN (BUILT_IN_IROUND):
8207 CASE_FLT_FN (BUILT_IN_LROUND):
8208 CASE_FLT_FN (BUILT_IN_LLROUND):
8209 real_round (&r, TYPE_MODE (ftype), &x);
8210 break;
8211
8212 default:
8213 gcc_unreachable ();
8214 }
8215
8216 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8217 if (!fail)
8218 return wide_int_to_tree (itype, val);
8219 }
8220 }
8221
8222 switch (DECL_FUNCTION_CODE (fndecl))
8223 {
8224 CASE_FLT_FN (BUILT_IN_LFLOOR):
8225 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8226 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8227 if (tree_expr_nonnegative_p (arg))
8228 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8229 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8230 break;
8231 default:;
8232 }
8233
8234 return fold_fixed_mathfn (loc, fndecl, arg);
8235 }
8236
8237 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8238 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8239 the argument to the call. Return NULL_TREE if no simplification can
8240 be made. */
8241
8242 static tree
8243 fold_builtin_bitop (tree fndecl, tree arg)
8244 {
8245 if (!validate_arg (arg, INTEGER_TYPE))
8246 return NULL_TREE;
8247
8248 /* Optimize for constant argument. */
8249 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8250 {
8251 tree type = TREE_TYPE (arg);
8252 int result;
8253
8254 switch (DECL_FUNCTION_CODE (fndecl))
8255 {
8256 CASE_INT_FN (BUILT_IN_FFS):
8257 result = wi::ffs (arg);
8258 break;
8259
8260 CASE_INT_FN (BUILT_IN_CLZ):
8261 if (wi::ne_p (arg, 0))
8262 result = wi::clz (arg);
8263 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8264 result = TYPE_PRECISION (type);
8265 break;
8266
8267 CASE_INT_FN (BUILT_IN_CTZ):
8268 if (wi::ne_p (arg, 0))
8269 result = wi::ctz (arg);
8270 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8271 result = TYPE_PRECISION (type);
8272 break;
8273
8274 CASE_INT_FN (BUILT_IN_CLRSB):
8275 result = wi::clrsb (arg);
8276 break;
8277
8278 CASE_INT_FN (BUILT_IN_POPCOUNT):
8279 result = wi::popcount (arg);
8280 break;
8281
8282 CASE_INT_FN (BUILT_IN_PARITY):
8283 result = wi::parity (arg);
8284 break;
8285
8286 default:
8287 gcc_unreachable ();
8288 }
8289
8290 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8291 }
8292
8293 return NULL_TREE;
8294 }
8295
8296 /* Fold function call to builtin_bswap and the short, long and long long
8297 variants. Return NULL_TREE if no simplification can be made. */
8298 static tree
8299 fold_builtin_bswap (tree fndecl, tree arg)
8300 {
8301 if (! validate_arg (arg, INTEGER_TYPE))
8302 return NULL_TREE;
8303
8304 /* Optimize constant value. */
8305 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8306 {
8307 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8308
8309 switch (DECL_FUNCTION_CODE (fndecl))
8310 {
8311 case BUILT_IN_BSWAP16:
8312 case BUILT_IN_BSWAP32:
8313 case BUILT_IN_BSWAP64:
8314 {
8315 signop sgn = TYPE_SIGN (type);
8316 tree result =
8317 wide_int_to_tree (type,
8318 wide_int::from (arg, TYPE_PRECISION (type),
8319 sgn).bswap ());
8320 return result;
8321 }
8322 default:
8323 gcc_unreachable ();
8324 }
8325 }
8326
8327 return NULL_TREE;
8328 }
8329
8330 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8331 NULL_TREE if no simplification can be made. */
8332
8333 static tree
8334 fold_builtin_hypot (location_t loc, tree fndecl,
8335 tree arg0, tree arg1, tree type)
8336 {
8337 tree res, narg0, narg1;
8338
8339 if (!validate_arg (arg0, REAL_TYPE)
8340 || !validate_arg (arg1, REAL_TYPE))
8341 return NULL_TREE;
8342
8343 /* Calculate the result when the argument is a constant. */
8344 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8345 return res;
8346
8347 /* If either argument to hypot has a negate or abs, strip that off.
8348 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8349 narg0 = fold_strip_sign_ops (arg0);
8350 narg1 = fold_strip_sign_ops (arg1);
8351 if (narg0 || narg1)
8352 {
8353 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8354 narg1 ? narg1 : arg1);
8355 }
8356
8357 /* If either argument is zero, hypot is fabs of the other. */
8358 if (real_zerop (arg0))
8359 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8360 else if (real_zerop (arg1))
8361 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8362
8363 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8364 if (flag_unsafe_math_optimizations
8365 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8366 return fold_build2_loc (loc, MULT_EXPR, type,
8367 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8368 build_real_truncate (type, dconst_sqrt2 ()));
8369
8370 return NULL_TREE;
8371 }
8372
8373
8374 /* Fold a builtin function call to pow, powf, or powl. Return
8375 NULL_TREE if no simplification can be made. */
8376 static tree
8377 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8378 {
8379 tree res;
8380
8381 if (!validate_arg (arg0, REAL_TYPE)
8382 || !validate_arg (arg1, REAL_TYPE))
8383 return NULL_TREE;
8384
8385 /* Calculate the result when the argument is a constant. */
8386 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8387 return res;
8388
8389 /* Optimize pow(1.0,y) = 1.0. */
8390 if (real_onep (arg0))
8391 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8392
8393 if (TREE_CODE (arg1) == REAL_CST
8394 && !TREE_OVERFLOW (arg1))
8395 {
8396 REAL_VALUE_TYPE cint;
8397 REAL_VALUE_TYPE c;
8398 HOST_WIDE_INT n;
8399
8400 c = TREE_REAL_CST (arg1);
8401
8402 /* Optimize pow(x,0.0) = 1.0. */
8403 if (real_equal (&c, &dconst0))
8404 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8405 arg0);
8406
8407 /* Optimize pow(x,1.0) = x. */
8408 if (real_equal (&c, &dconst1))
8409 return arg0;
8410
8411 /* Optimize pow(x,-1.0) = 1.0/x. */
8412 if (real_equal (&c, &dconstm1))
8413 return fold_build2_loc (loc, RDIV_EXPR, type,
8414 build_real (type, dconst1), arg0);
8415
8416 /* Optimize pow(x,0.5) = sqrt(x). */
8417 if (flag_unsafe_math_optimizations
8418 && real_equal (&c, &dconsthalf))
8419 {
8420 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8421
8422 if (sqrtfn != NULL_TREE)
8423 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8424 }
8425
8426 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8427 if (flag_unsafe_math_optimizations)
8428 {
8429 const REAL_VALUE_TYPE dconstroot
8430 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8431
8432 if (real_equal (&c, &dconstroot))
8433 {
8434 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8435 if (cbrtfn != NULL_TREE)
8436 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8437 }
8438 }
8439
8440 /* Check for an integer exponent. */
8441 n = real_to_integer (&c);
8442 real_from_integer (&cint, VOIDmode, n, SIGNED);
8443 if (real_identical (&c, &cint))
8444 {
8445 /* Attempt to evaluate pow at compile-time, unless this should
8446 raise an exception. */
8447 if (TREE_CODE (arg0) == REAL_CST
8448 && !TREE_OVERFLOW (arg0)
8449 && (n > 0
8450 || (!flag_trapping_math && !flag_errno_math)
8451 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8452 {
8453 REAL_VALUE_TYPE x;
8454 bool inexact;
8455
8456 x = TREE_REAL_CST (arg0);
8457 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8458 if (flag_unsafe_math_optimizations || !inexact)
8459 return build_real (type, x);
8460 }
8461
8462 /* Strip sign ops from even integer powers. */
8463 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8464 {
8465 tree narg0 = fold_strip_sign_ops (arg0);
8466 if (narg0)
8467 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8468 }
8469 }
8470 }
8471
8472 if (flag_unsafe_math_optimizations)
8473 {
8474 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8475
8476 /* Optimize pow(expN(x),y) = expN(x*y). */
8477 if (BUILTIN_EXPONENT_P (fcode))
8478 {
8479 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8480 tree arg = CALL_EXPR_ARG (arg0, 0);
8481 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8482 return build_call_expr_loc (loc, expfn, 1, arg);
8483 }
8484
8485 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8486 if (BUILTIN_SQRT_P (fcode))
8487 {
8488 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8489 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8490 build_real (type, dconsthalf));
8491 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8492 }
8493
8494 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8495 if (BUILTIN_CBRT_P (fcode))
8496 {
8497 tree arg = CALL_EXPR_ARG (arg0, 0);
8498 if (tree_expr_nonnegative_p (arg))
8499 {
8500 tree c = build_real_truncate (type, dconst_third ());
8501 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
8502 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8503 }
8504 }
8505
8506 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8507 if (fcode == BUILT_IN_POW
8508 || fcode == BUILT_IN_POWF
8509 || fcode == BUILT_IN_POWL)
8510 {
8511 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8512 if (tree_expr_nonnegative_p (arg00))
8513 {
8514 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8515 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8516 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8517 }
8518 }
8519 }
8520
8521 return NULL_TREE;
8522 }
8523
8524 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8525 Return NULL_TREE if no simplification can be made. */
8526 static tree
8527 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8528 tree arg0, tree arg1, tree type)
8529 {
8530 if (!validate_arg (arg0, REAL_TYPE)
8531 || !validate_arg (arg1, INTEGER_TYPE))
8532 return NULL_TREE;
8533
8534 /* Optimize pow(1.0,y) = 1.0. */
8535 if (real_onep (arg0))
8536 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8537
8538 if (tree_fits_shwi_p (arg1))
8539 {
8540 HOST_WIDE_INT c = tree_to_shwi (arg1);
8541
8542 /* Evaluate powi at compile-time. */
8543 if (TREE_CODE (arg0) == REAL_CST
8544 && !TREE_OVERFLOW (arg0))
8545 {
8546 REAL_VALUE_TYPE x;
8547 x = TREE_REAL_CST (arg0);
8548 real_powi (&x, TYPE_MODE (type), &x, c);
8549 return build_real (type, x);
8550 }
8551
8552 /* Optimize pow(x,0) = 1.0. */
8553 if (c == 0)
8554 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8555 arg0);
8556
8557 /* Optimize pow(x,1) = x. */
8558 if (c == 1)
8559 return arg0;
8560
8561 /* Optimize pow(x,-1) = 1.0/x. */
8562 if (c == -1)
8563 return fold_build2_loc (loc, RDIV_EXPR, type,
8564 build_real (type, dconst1), arg0);
8565 }
8566
8567 return NULL_TREE;
8568 }
8569
8570 /* A subroutine of fold_builtin to fold the various exponent
8571 functions. Return NULL_TREE if no simplification can be made.
8572 FUNC is the corresponding MPFR exponent function. */
8573
8574 static tree
8575 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8576 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8577 {
8578 if (validate_arg (arg, REAL_TYPE))
8579 {
8580 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8581 tree res;
8582
8583 /* Calculate the result when the argument is a constant. */
8584 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8585 return res;
8586
8587 /* Optimize expN(logN(x)) = x. */
8588 if (flag_unsafe_math_optimizations)
8589 {
8590 const enum built_in_function fcode = builtin_mathfn_code (arg);
8591
8592 if ((func == mpfr_exp
8593 && (fcode == BUILT_IN_LOG
8594 || fcode == BUILT_IN_LOGF
8595 || fcode == BUILT_IN_LOGL))
8596 || (func == mpfr_exp2
8597 && (fcode == BUILT_IN_LOG2
8598 || fcode == BUILT_IN_LOG2F
8599 || fcode == BUILT_IN_LOG2L))
8600 || (func == mpfr_exp10
8601 && (fcode == BUILT_IN_LOG10
8602 || fcode == BUILT_IN_LOG10F
8603 || fcode == BUILT_IN_LOG10L)))
8604 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8605 }
8606 }
8607
8608 return NULL_TREE;
8609 }
8610
8611 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8612 arguments to the call, and TYPE is its return type.
8613 Return NULL_TREE if no simplification can be made. */
8614
8615 static tree
8616 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8617 {
8618 if (!validate_arg (arg1, POINTER_TYPE)
8619 || !validate_arg (arg2, INTEGER_TYPE)
8620 || !validate_arg (len, INTEGER_TYPE))
8621 return NULL_TREE;
8622 else
8623 {
8624 const char *p1;
8625
8626 if (TREE_CODE (arg2) != INTEGER_CST
8627 || !tree_fits_uhwi_p (len))
8628 return NULL_TREE;
8629
8630 p1 = c_getstr (arg1);
8631 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8632 {
8633 char c;
8634 const char *r;
8635 tree tem;
8636
8637 if (target_char_cast (arg2, &c))
8638 return NULL_TREE;
8639
8640 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8641
8642 if (r == NULL)
8643 return build_int_cst (TREE_TYPE (arg1), 0);
8644
8645 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8646 return fold_convert_loc (loc, type, tem);
8647 }
8648 return NULL_TREE;
8649 }
8650 }
8651
8652 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8653 Return NULL_TREE if no simplification can be made. */
8654
8655 static tree
8656 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8657 {
8658 const char *p1, *p2;
8659
8660 if (!validate_arg (arg1, POINTER_TYPE)
8661 || !validate_arg (arg2, POINTER_TYPE)
8662 || !validate_arg (len, INTEGER_TYPE))
8663 return NULL_TREE;
8664
8665 /* If the LEN parameter is zero, return zero. */
8666 if (integer_zerop (len))
8667 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8668 arg1, arg2);
8669
8670 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8671 if (operand_equal_p (arg1, arg2, 0))
8672 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8673
8674 p1 = c_getstr (arg1);
8675 p2 = c_getstr (arg2);
8676
8677 /* If all arguments are constant, and the value of len is not greater
8678 than the lengths of arg1 and arg2, evaluate at compile-time. */
8679 if (tree_fits_uhwi_p (len) && p1 && p2
8680 && compare_tree_int (len, strlen (p1) + 1) <= 0
8681 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8682 {
8683 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8684
8685 if (r > 0)
8686 return integer_one_node;
8687 else if (r < 0)
8688 return integer_minus_one_node;
8689 else
8690 return integer_zero_node;
8691 }
8692
8693 /* If len parameter is one, return an expression corresponding to
8694 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8695 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8696 {
8697 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8698 tree cst_uchar_ptr_node
8699 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8700
8701 tree ind1
8702 = fold_convert_loc (loc, integer_type_node,
8703 build1 (INDIRECT_REF, cst_uchar_node,
8704 fold_convert_loc (loc,
8705 cst_uchar_ptr_node,
8706 arg1)));
8707 tree ind2
8708 = fold_convert_loc (loc, integer_type_node,
8709 build1 (INDIRECT_REF, cst_uchar_node,
8710 fold_convert_loc (loc,
8711 cst_uchar_ptr_node,
8712 arg2)));
8713 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8714 }
8715
8716 return NULL_TREE;
8717 }
8718
8719 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8720 Return NULL_TREE if no simplification can be made. */
8721
8722 static tree
8723 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8724 {
8725 const char *p1, *p2;
8726
8727 if (!validate_arg (arg1, POINTER_TYPE)
8728 || !validate_arg (arg2, POINTER_TYPE))
8729 return NULL_TREE;
8730
8731 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8732 if (operand_equal_p (arg1, arg2, 0))
8733 return integer_zero_node;
8734
8735 p1 = c_getstr (arg1);
8736 p2 = c_getstr (arg2);
8737
8738 if (p1 && p2)
8739 {
8740 const int i = strcmp (p1, p2);
8741 if (i < 0)
8742 return integer_minus_one_node;
8743 else if (i > 0)
8744 return integer_one_node;
8745 else
8746 return integer_zero_node;
8747 }
8748
8749 /* If the second arg is "", return *(const unsigned char*)arg1. */
8750 if (p2 && *p2 == '\0')
8751 {
8752 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8753 tree cst_uchar_ptr_node
8754 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8755
8756 return fold_convert_loc (loc, integer_type_node,
8757 build1 (INDIRECT_REF, cst_uchar_node,
8758 fold_convert_loc (loc,
8759 cst_uchar_ptr_node,
8760 arg1)));
8761 }
8762
8763 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8764 if (p1 && *p1 == '\0')
8765 {
8766 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8767 tree cst_uchar_ptr_node
8768 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8769
8770 tree temp
8771 = fold_convert_loc (loc, integer_type_node,
8772 build1 (INDIRECT_REF, cst_uchar_node,
8773 fold_convert_loc (loc,
8774 cst_uchar_ptr_node,
8775 arg2)));
8776 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8777 }
8778
8779 return NULL_TREE;
8780 }
8781
8782 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8783 Return NULL_TREE if no simplification can be made. */
8784
8785 static tree
8786 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8787 {
8788 const char *p1, *p2;
8789
8790 if (!validate_arg (arg1, POINTER_TYPE)
8791 || !validate_arg (arg2, POINTER_TYPE)
8792 || !validate_arg (len, INTEGER_TYPE))
8793 return NULL_TREE;
8794
8795 /* If the LEN parameter is zero, return zero. */
8796 if (integer_zerop (len))
8797 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8798 arg1, arg2);
8799
8800 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8801 if (operand_equal_p (arg1, arg2, 0))
8802 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8803
8804 p1 = c_getstr (arg1);
8805 p2 = c_getstr (arg2);
8806
8807 if (tree_fits_uhwi_p (len) && p1 && p2)
8808 {
8809 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8810 if (i > 0)
8811 return integer_one_node;
8812 else if (i < 0)
8813 return integer_minus_one_node;
8814 else
8815 return integer_zero_node;
8816 }
8817
8818 /* If the second arg is "", and the length is greater than zero,
8819 return *(const unsigned char*)arg1. */
8820 if (p2 && *p2 == '\0'
8821 && TREE_CODE (len) == INTEGER_CST
8822 && tree_int_cst_sgn (len) == 1)
8823 {
8824 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8825 tree cst_uchar_ptr_node
8826 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8827
8828 return fold_convert_loc (loc, integer_type_node,
8829 build1 (INDIRECT_REF, cst_uchar_node,
8830 fold_convert_loc (loc,
8831 cst_uchar_ptr_node,
8832 arg1)));
8833 }
8834
8835 /* If the first arg is "", and the length is greater than zero,
8836 return -*(const unsigned char*)arg2. */
8837 if (p1 && *p1 == '\0'
8838 && TREE_CODE (len) == INTEGER_CST
8839 && tree_int_cst_sgn (len) == 1)
8840 {
8841 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8842 tree cst_uchar_ptr_node
8843 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8844
8845 tree temp = fold_convert_loc (loc, integer_type_node,
8846 build1 (INDIRECT_REF, cst_uchar_node,
8847 fold_convert_loc (loc,
8848 cst_uchar_ptr_node,
8849 arg2)));
8850 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8851 }
8852
8853 /* If len parameter is one, return an expression corresponding to
8854 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8855 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8856 {
8857 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8858 tree cst_uchar_ptr_node
8859 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8860
8861 tree ind1 = fold_convert_loc (loc, integer_type_node,
8862 build1 (INDIRECT_REF, cst_uchar_node,
8863 fold_convert_loc (loc,
8864 cst_uchar_ptr_node,
8865 arg1)));
8866 tree ind2 = fold_convert_loc (loc, integer_type_node,
8867 build1 (INDIRECT_REF, cst_uchar_node,
8868 fold_convert_loc (loc,
8869 cst_uchar_ptr_node,
8870 arg2)));
8871 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8872 }
8873
8874 return NULL_TREE;
8875 }
8876
8877 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8878 ARG. Return NULL_TREE if no simplification can be made. */
8879
8880 static tree
8881 fold_builtin_signbit (location_t loc, tree arg, tree type)
8882 {
8883 if (!validate_arg (arg, REAL_TYPE))
8884 return NULL_TREE;
8885
8886 /* If ARG is a compile-time constant, determine the result. */
8887 if (TREE_CODE (arg) == REAL_CST
8888 && !TREE_OVERFLOW (arg))
8889 {
8890 REAL_VALUE_TYPE c;
8891
8892 c = TREE_REAL_CST (arg);
8893 return (REAL_VALUE_NEGATIVE (c)
8894 ? build_one_cst (type)
8895 : build_zero_cst (type));
8896 }
8897
8898 /* If ARG is non-negative, the result is always zero. */
8899 if (tree_expr_nonnegative_p (arg))
8900 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8901
8902 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8903 if (!HONOR_SIGNED_ZEROS (arg))
8904 return fold_convert (type,
8905 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8906 build_real (TREE_TYPE (arg), dconst0)));
8907
8908 return NULL_TREE;
8909 }
8910
8911 /* Fold function call to builtin copysign, copysignf or copysignl with
8912 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8913 be made. */
8914
8915 static tree
8916 fold_builtin_copysign (location_t loc, tree fndecl,
8917 tree arg1, tree arg2, tree type)
8918 {
8919 tree tem;
8920
8921 if (!validate_arg (arg1, REAL_TYPE)
8922 || !validate_arg (arg2, REAL_TYPE))
8923 return NULL_TREE;
8924
8925 /* copysign(X,X) is X. */
8926 if (operand_equal_p (arg1, arg2, 0))
8927 return fold_convert_loc (loc, type, arg1);
8928
8929 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8930 if (TREE_CODE (arg1) == REAL_CST
8931 && TREE_CODE (arg2) == REAL_CST
8932 && !TREE_OVERFLOW (arg1)
8933 && !TREE_OVERFLOW (arg2))
8934 {
8935 REAL_VALUE_TYPE c1, c2;
8936
8937 c1 = TREE_REAL_CST (arg1);
8938 c2 = TREE_REAL_CST (arg2);
8939 /* c1.sign := c2.sign. */
8940 real_copysign (&c1, &c2);
8941 return build_real (type, c1);
8942 }
8943
8944 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8945 Remember to evaluate Y for side-effects. */
8946 if (tree_expr_nonnegative_p (arg2))
8947 return omit_one_operand_loc (loc, type,
8948 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8949 arg2);
8950
8951 /* Strip sign changing operations for the first argument. */
8952 tem = fold_strip_sign_ops (arg1);
8953 if (tem)
8954 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8955
8956 return NULL_TREE;
8957 }
8958
8959 /* Fold a call to builtin isascii with argument ARG. */
8960
8961 static tree
8962 fold_builtin_isascii (location_t loc, tree arg)
8963 {
8964 if (!validate_arg (arg, INTEGER_TYPE))
8965 return NULL_TREE;
8966 else
8967 {
8968 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8969 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8970 build_int_cst (integer_type_node,
8971 ~ (unsigned HOST_WIDE_INT) 0x7f));
8972 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8973 arg, integer_zero_node);
8974 }
8975 }
8976
8977 /* Fold a call to builtin toascii with argument ARG. */
8978
8979 static tree
8980 fold_builtin_toascii (location_t loc, tree arg)
8981 {
8982 if (!validate_arg (arg, INTEGER_TYPE))
8983 return NULL_TREE;
8984
8985 /* Transform toascii(c) -> (c & 0x7f). */
8986 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8987 build_int_cst (integer_type_node, 0x7f));
8988 }
8989
8990 /* Fold a call to builtin isdigit with argument ARG. */
8991
8992 static tree
8993 fold_builtin_isdigit (location_t loc, tree arg)
8994 {
8995 if (!validate_arg (arg, INTEGER_TYPE))
8996 return NULL_TREE;
8997 else
8998 {
8999 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9000 /* According to the C standard, isdigit is unaffected by locale.
9001 However, it definitely is affected by the target character set. */
9002 unsigned HOST_WIDE_INT target_digit0
9003 = lang_hooks.to_target_charset ('0');
9004
9005 if (target_digit0 == 0)
9006 return NULL_TREE;
9007
9008 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9009 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9010 build_int_cst (unsigned_type_node, target_digit0));
9011 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9012 build_int_cst (unsigned_type_node, 9));
9013 }
9014 }
9015
9016 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9017
9018 static tree
9019 fold_builtin_fabs (location_t loc, tree arg, tree type)
9020 {
9021 if (!validate_arg (arg, REAL_TYPE))
9022 return NULL_TREE;
9023
9024 arg = fold_convert_loc (loc, type, arg);
9025 if (TREE_CODE (arg) == REAL_CST)
9026 return fold_abs_const (arg, type);
9027 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9028 }
9029
9030 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9031
9032 static tree
9033 fold_builtin_abs (location_t loc, tree arg, tree type)
9034 {
9035 if (!validate_arg (arg, INTEGER_TYPE))
9036 return NULL_TREE;
9037
9038 arg = fold_convert_loc (loc, type, arg);
9039 if (TREE_CODE (arg) == INTEGER_CST)
9040 return fold_abs_const (arg, type);
9041 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9042 }
9043
9044 /* Fold a fma operation with arguments ARG[012]. */
9045
9046 tree
9047 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9048 tree type, tree arg0, tree arg1, tree arg2)
9049 {
9050 if (TREE_CODE (arg0) == REAL_CST
9051 && TREE_CODE (arg1) == REAL_CST
9052 && TREE_CODE (arg2) == REAL_CST)
9053 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9054
9055 return NULL_TREE;
9056 }
9057
9058 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9059
9060 static tree
9061 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9062 {
9063 if (validate_arg (arg0, REAL_TYPE)
9064 && validate_arg (arg1, REAL_TYPE)
9065 && validate_arg (arg2, REAL_TYPE))
9066 {
9067 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9068 if (tem)
9069 return tem;
9070
9071 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9072 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9073 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9074 }
9075 return NULL_TREE;
9076 }
9077
9078 /* Fold a call to builtin fmin or fmax. */
9079
9080 static tree
9081 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9082 tree type, bool max)
9083 {
9084 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9085 {
9086 /* Calculate the result when the argument is a constant. */
9087 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9088
9089 if (res)
9090 return res;
9091
9092 /* If either argument is NaN, return the other one. Avoid the
9093 transformation if we get (and honor) a signalling NaN. Using
9094 omit_one_operand() ensures we create a non-lvalue. */
9095 if (TREE_CODE (arg0) == REAL_CST
9096 && real_isnan (&TREE_REAL_CST (arg0))
9097 && (! HONOR_SNANS (arg0)
9098 || ! TREE_REAL_CST (arg0).signalling))
9099 return omit_one_operand_loc (loc, type, arg1, arg0);
9100 if (TREE_CODE (arg1) == REAL_CST
9101 && real_isnan (&TREE_REAL_CST (arg1))
9102 && (! HONOR_SNANS (arg1)
9103 || ! TREE_REAL_CST (arg1).signalling))
9104 return omit_one_operand_loc (loc, type, arg0, arg1);
9105
9106 /* Transform fmin/fmax(x,x) -> x. */
9107 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9108 return omit_one_operand_loc (loc, type, arg0, arg1);
9109
9110 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9111 functions to return the numeric arg if the other one is NaN.
9112 These tree codes don't honor that, so only transform if
9113 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9114 handled, so we don't have to worry about it either. */
9115 if (flag_finite_math_only)
9116 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9117 fold_convert_loc (loc, type, arg0),
9118 fold_convert_loc (loc, type, arg1));
9119 }
9120 return NULL_TREE;
9121 }
9122
9123 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9124
9125 static tree
9126 fold_builtin_carg (location_t loc, tree arg, tree type)
9127 {
9128 if (validate_arg (arg, COMPLEX_TYPE)
9129 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9130 {
9131 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9132
9133 if (atan2_fn)
9134 {
9135 tree new_arg = builtin_save_expr (arg);
9136 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9137 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9138 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9139 }
9140 }
9141
9142 return NULL_TREE;
9143 }
9144
9145 /* Fold a call to builtin logb/ilogb. */
9146
9147 static tree
9148 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9149 {
9150 if (! validate_arg (arg, REAL_TYPE))
9151 return NULL_TREE;
9152
9153 STRIP_NOPS (arg);
9154
9155 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9156 {
9157 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9158
9159 switch (value->cl)
9160 {
9161 case rvc_nan:
9162 case rvc_inf:
9163 /* If arg is Inf or NaN and we're logb, return it. */
9164 if (TREE_CODE (rettype) == REAL_TYPE)
9165 {
9166 /* For logb(-Inf) we have to return +Inf. */
9167 if (real_isinf (value) && real_isneg (value))
9168 {
9169 REAL_VALUE_TYPE tem;
9170 real_inf (&tem);
9171 return build_real (rettype, tem);
9172 }
9173 return fold_convert_loc (loc, rettype, arg);
9174 }
9175 /* Fall through... */
9176 case rvc_zero:
9177 /* Zero may set errno and/or raise an exception for logb, also
9178 for ilogb we don't know FP_ILOGB0. */
9179 return NULL_TREE;
9180 case rvc_normal:
9181 /* For normal numbers, proceed iff radix == 2. In GCC,
9182 normalized significands are in the range [0.5, 1.0). We
9183 want the exponent as if they were [1.0, 2.0) so get the
9184 exponent and subtract 1. */
9185 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9186 return fold_convert_loc (loc, rettype,
9187 build_int_cst (integer_type_node,
9188 REAL_EXP (value)-1));
9189 break;
9190 }
9191 }
9192
9193 return NULL_TREE;
9194 }
9195
9196 /* Fold a call to builtin significand, if radix == 2. */
9197
9198 static tree
9199 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9200 {
9201 if (! validate_arg (arg, REAL_TYPE))
9202 return NULL_TREE;
9203
9204 STRIP_NOPS (arg);
9205
9206 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9207 {
9208 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9209
9210 switch (value->cl)
9211 {
9212 case rvc_zero:
9213 case rvc_nan:
9214 case rvc_inf:
9215 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9216 return fold_convert_loc (loc, rettype, arg);
9217 case rvc_normal:
9218 /* For normal numbers, proceed iff radix == 2. */
9219 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9220 {
9221 REAL_VALUE_TYPE result = *value;
9222 /* In GCC, normalized significands are in the range [0.5,
9223 1.0). We want them to be [1.0, 2.0) so set the
9224 exponent to 1. */
9225 SET_REAL_EXP (&result, 1);
9226 return build_real (rettype, result);
9227 }
9228 break;
9229 }
9230 }
9231
9232 return NULL_TREE;
9233 }
9234
9235 /* Fold a call to builtin frexp, we can assume the base is 2. */
9236
9237 static tree
9238 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9239 {
9240 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9241 return NULL_TREE;
9242
9243 STRIP_NOPS (arg0);
9244
9245 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9246 return NULL_TREE;
9247
9248 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9249
9250 /* Proceed if a valid pointer type was passed in. */
9251 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9252 {
9253 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9254 tree frac, exp;
9255
9256 switch (value->cl)
9257 {
9258 case rvc_zero:
9259 /* For +-0, return (*exp = 0, +-0). */
9260 exp = integer_zero_node;
9261 frac = arg0;
9262 break;
9263 case rvc_nan:
9264 case rvc_inf:
9265 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9266 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9267 case rvc_normal:
9268 {
9269 /* Since the frexp function always expects base 2, and in
9270 GCC normalized significands are already in the range
9271 [0.5, 1.0), we have exactly what frexp wants. */
9272 REAL_VALUE_TYPE frac_rvt = *value;
9273 SET_REAL_EXP (&frac_rvt, 0);
9274 frac = build_real (rettype, frac_rvt);
9275 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9276 }
9277 break;
9278 default:
9279 gcc_unreachable ();
9280 }
9281
9282 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9283 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9284 TREE_SIDE_EFFECTS (arg1) = 1;
9285 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9286 }
9287
9288 return NULL_TREE;
9289 }
9290
9291 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9292 then we can assume the base is two. If it's false, then we have to
9293 check the mode of the TYPE parameter in certain cases. */
9294
9295 static tree
9296 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9297 tree type, bool ldexp)
9298 {
9299 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9300 {
9301 STRIP_NOPS (arg0);
9302 STRIP_NOPS (arg1);
9303
9304 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9305 if (real_zerop (arg0) || integer_zerop (arg1)
9306 || (TREE_CODE (arg0) == REAL_CST
9307 && !real_isfinite (&TREE_REAL_CST (arg0))))
9308 return omit_one_operand_loc (loc, type, arg0, arg1);
9309
9310 /* If both arguments are constant, then try to evaluate it. */
9311 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9312 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9313 && tree_fits_shwi_p (arg1))
9314 {
9315 /* Bound the maximum adjustment to twice the range of the
9316 mode's valid exponents. Use abs to ensure the range is
9317 positive as a sanity check. */
9318 const long max_exp_adj = 2 *
9319 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9320 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9321
9322 /* Get the user-requested adjustment. */
9323 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9324
9325 /* The requested adjustment must be inside this range. This
9326 is a preliminary cap to avoid things like overflow, we
9327 may still fail to compute the result for other reasons. */
9328 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9329 {
9330 REAL_VALUE_TYPE initial_result;
9331
9332 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9333
9334 /* Ensure we didn't overflow. */
9335 if (! real_isinf (&initial_result))
9336 {
9337 const REAL_VALUE_TYPE trunc_result
9338 = real_value_truncate (TYPE_MODE (type), initial_result);
9339
9340 /* Only proceed if the target mode can hold the
9341 resulting value. */
9342 if (real_equal (&initial_result, &trunc_result))
9343 return build_real (type, trunc_result);
9344 }
9345 }
9346 }
9347 }
9348
9349 return NULL_TREE;
9350 }
9351
9352 /* Fold a call to builtin modf. */
9353
9354 static tree
9355 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9356 {
9357 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9358 return NULL_TREE;
9359
9360 STRIP_NOPS (arg0);
9361
9362 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9363 return NULL_TREE;
9364
9365 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9366
9367 /* Proceed if a valid pointer type was passed in. */
9368 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9369 {
9370 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9371 REAL_VALUE_TYPE trunc, frac;
9372
9373 switch (value->cl)
9374 {
9375 case rvc_nan:
9376 case rvc_zero:
9377 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9378 trunc = frac = *value;
9379 break;
9380 case rvc_inf:
9381 /* For +-Inf, return (*arg1 = arg0, +-0). */
9382 frac = dconst0;
9383 frac.sign = value->sign;
9384 trunc = *value;
9385 break;
9386 case rvc_normal:
9387 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9388 real_trunc (&trunc, VOIDmode, value);
9389 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9390 /* If the original number was negative and already
9391 integral, then the fractional part is -0.0. */
9392 if (value->sign && frac.cl == rvc_zero)
9393 frac.sign = value->sign;
9394 break;
9395 }
9396
9397 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9398 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9399 build_real (rettype, trunc));
9400 TREE_SIDE_EFFECTS (arg1) = 1;
9401 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9402 build_real (rettype, frac));
9403 }
9404
9405 return NULL_TREE;
9406 }
9407
9408 /* Given a location LOC, an interclass builtin function decl FNDECL
9409 and its single argument ARG, return an folded expression computing
9410 the same, or NULL_TREE if we either couldn't or didn't want to fold
9411 (the latter happen if there's an RTL instruction available). */
9412
9413 static tree
9414 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9415 {
9416 machine_mode mode;
9417
9418 if (!validate_arg (arg, REAL_TYPE))
9419 return NULL_TREE;
9420
9421 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9422 return NULL_TREE;
9423
9424 mode = TYPE_MODE (TREE_TYPE (arg));
9425
9426 /* If there is no optab, try generic code. */
9427 switch (DECL_FUNCTION_CODE (fndecl))
9428 {
9429 tree result;
9430
9431 CASE_FLT_FN (BUILT_IN_ISINF):
9432 {
9433 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9434 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9435 tree const type = TREE_TYPE (arg);
9436 REAL_VALUE_TYPE r;
9437 char buf[128];
9438
9439 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9440 real_from_string (&r, buf);
9441 result = build_call_expr (isgr_fn, 2,
9442 fold_build1_loc (loc, ABS_EXPR, type, arg),
9443 build_real (type, r));
9444 return result;
9445 }
9446 CASE_FLT_FN (BUILT_IN_FINITE):
9447 case BUILT_IN_ISFINITE:
9448 {
9449 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9450 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9451 tree const type = TREE_TYPE (arg);
9452 REAL_VALUE_TYPE r;
9453 char buf[128];
9454
9455 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9456 real_from_string (&r, buf);
9457 result = build_call_expr (isle_fn, 2,
9458 fold_build1_loc (loc, ABS_EXPR, type, arg),
9459 build_real (type, r));
9460 /*result = fold_build2_loc (loc, UNGT_EXPR,
9461 TREE_TYPE (TREE_TYPE (fndecl)),
9462 fold_build1_loc (loc, ABS_EXPR, type, arg),
9463 build_real (type, r));
9464 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9465 TREE_TYPE (TREE_TYPE (fndecl)),
9466 result);*/
9467 return result;
9468 }
9469 case BUILT_IN_ISNORMAL:
9470 {
9471 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9472 islessequal(fabs(x),DBL_MAX). */
9473 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9474 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9475 tree const type = TREE_TYPE (arg);
9476 REAL_VALUE_TYPE rmax, rmin;
9477 char buf[128];
9478
9479 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9480 real_from_string (&rmax, buf);
9481 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9482 real_from_string (&rmin, buf);
9483 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9484 result = build_call_expr (isle_fn, 2, arg,
9485 build_real (type, rmax));
9486 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9487 build_call_expr (isge_fn, 2, arg,
9488 build_real (type, rmin)));
9489 return result;
9490 }
9491 default:
9492 break;
9493 }
9494
9495 return NULL_TREE;
9496 }
9497
9498 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9499 ARG is the argument for the call. */
9500
9501 static tree
9502 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9503 {
9504 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9505 REAL_VALUE_TYPE r;
9506
9507 if (!validate_arg (arg, REAL_TYPE))
9508 return NULL_TREE;
9509
9510 switch (builtin_index)
9511 {
9512 case BUILT_IN_ISINF:
9513 if (!HONOR_INFINITIES (arg))
9514 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9515
9516 if (TREE_CODE (arg) == REAL_CST)
9517 {
9518 r = TREE_REAL_CST (arg);
9519 if (real_isinf (&r))
9520 return real_compare (GT_EXPR, &r, &dconst0)
9521 ? integer_one_node : integer_minus_one_node;
9522 else
9523 return integer_zero_node;
9524 }
9525
9526 return NULL_TREE;
9527
9528 case BUILT_IN_ISINF_SIGN:
9529 {
9530 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9531 /* In a boolean context, GCC will fold the inner COND_EXPR to
9532 1. So e.g. "if (isinf_sign(x))" would be folded to just
9533 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9534 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9535 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9536 tree tmp = NULL_TREE;
9537
9538 arg = builtin_save_expr (arg);
9539
9540 if (signbit_fn && isinf_fn)
9541 {
9542 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9543 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9544
9545 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9546 signbit_call, integer_zero_node);
9547 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9548 isinf_call, integer_zero_node);
9549
9550 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9551 integer_minus_one_node, integer_one_node);
9552 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9553 isinf_call, tmp,
9554 integer_zero_node);
9555 }
9556
9557 return tmp;
9558 }
9559
9560 case BUILT_IN_ISFINITE:
9561 if (!HONOR_NANS (arg)
9562 && !HONOR_INFINITIES (arg))
9563 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9564
9565 if (TREE_CODE (arg) == REAL_CST)
9566 {
9567 r = TREE_REAL_CST (arg);
9568 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9569 }
9570
9571 return NULL_TREE;
9572
9573 case BUILT_IN_ISNAN:
9574 if (!HONOR_NANS (arg))
9575 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9576
9577 if (TREE_CODE (arg) == REAL_CST)
9578 {
9579 r = TREE_REAL_CST (arg);
9580 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9581 }
9582
9583 arg = builtin_save_expr (arg);
9584 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9585
9586 default:
9587 gcc_unreachable ();
9588 }
9589 }
9590
9591 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9592 This builtin will generate code to return the appropriate floating
9593 point classification depending on the value of the floating point
9594 number passed in. The possible return values must be supplied as
9595 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9596 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9597 one floating point argument which is "type generic". */
9598
9599 static tree
9600 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9601 {
9602 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9603 arg, type, res, tmp;
9604 machine_mode mode;
9605 REAL_VALUE_TYPE r;
9606 char buf[128];
9607
9608 /* Verify the required arguments in the original call. */
9609 if (nargs != 6
9610 || !validate_arg (args[0], INTEGER_TYPE)
9611 || !validate_arg (args[1], INTEGER_TYPE)
9612 || !validate_arg (args[2], INTEGER_TYPE)
9613 || !validate_arg (args[3], INTEGER_TYPE)
9614 || !validate_arg (args[4], INTEGER_TYPE)
9615 || !validate_arg (args[5], REAL_TYPE))
9616 return NULL_TREE;
9617
9618 fp_nan = args[0];
9619 fp_infinite = args[1];
9620 fp_normal = args[2];
9621 fp_subnormal = args[3];
9622 fp_zero = args[4];
9623 arg = args[5];
9624 type = TREE_TYPE (arg);
9625 mode = TYPE_MODE (type);
9626 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9627
9628 /* fpclassify(x) ->
9629 isnan(x) ? FP_NAN :
9630 (fabs(x) == Inf ? FP_INFINITE :
9631 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9632 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9633
9634 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9635 build_real (type, dconst0));
9636 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9637 tmp, fp_zero, fp_subnormal);
9638
9639 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9640 real_from_string (&r, buf);
9641 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9642 arg, build_real (type, r));
9643 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9644
9645 if (HONOR_INFINITIES (mode))
9646 {
9647 real_inf (&r);
9648 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9649 build_real (type, r));
9650 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9651 fp_infinite, res);
9652 }
9653
9654 if (HONOR_NANS (mode))
9655 {
9656 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9657 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9658 }
9659
9660 return res;
9661 }
9662
9663 /* Fold a call to an unordered comparison function such as
9664 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9665 being called and ARG0 and ARG1 are the arguments for the call.
9666 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9667 the opposite of the desired result. UNORDERED_CODE is used
9668 for modes that can hold NaNs and ORDERED_CODE is used for
9669 the rest. */
9670
9671 static tree
9672 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9673 enum tree_code unordered_code,
9674 enum tree_code ordered_code)
9675 {
9676 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9677 enum tree_code code;
9678 tree type0, type1;
9679 enum tree_code code0, code1;
9680 tree cmp_type = NULL_TREE;
9681
9682 type0 = TREE_TYPE (arg0);
9683 type1 = TREE_TYPE (arg1);
9684
9685 code0 = TREE_CODE (type0);
9686 code1 = TREE_CODE (type1);
9687
9688 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9689 /* Choose the wider of two real types. */
9690 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9691 ? type0 : type1;
9692 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9693 cmp_type = type0;
9694 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9695 cmp_type = type1;
9696
9697 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9698 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9699
9700 if (unordered_code == UNORDERED_EXPR)
9701 {
9702 if (!HONOR_NANS (arg0))
9703 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9704 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9705 }
9706
9707 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9708 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9709 fold_build2_loc (loc, code, type, arg0, arg1));
9710 }
9711
9712 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9713 arithmetics if it can never overflow, or into internal functions that
9714 return both result of arithmetics and overflowed boolean flag in
9715 a complex integer result, or some other check for overflow. */
9716
9717 static tree
9718 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9719 tree arg0, tree arg1, tree arg2)
9720 {
9721 enum internal_fn ifn = IFN_LAST;
9722 tree type = TREE_TYPE (TREE_TYPE (arg2));
9723 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9724 switch (fcode)
9725 {
9726 case BUILT_IN_ADD_OVERFLOW:
9727 case BUILT_IN_SADD_OVERFLOW:
9728 case BUILT_IN_SADDL_OVERFLOW:
9729 case BUILT_IN_SADDLL_OVERFLOW:
9730 case BUILT_IN_UADD_OVERFLOW:
9731 case BUILT_IN_UADDL_OVERFLOW:
9732 case BUILT_IN_UADDLL_OVERFLOW:
9733 ifn = IFN_ADD_OVERFLOW;
9734 break;
9735 case BUILT_IN_SUB_OVERFLOW:
9736 case BUILT_IN_SSUB_OVERFLOW:
9737 case BUILT_IN_SSUBL_OVERFLOW:
9738 case BUILT_IN_SSUBLL_OVERFLOW:
9739 case BUILT_IN_USUB_OVERFLOW:
9740 case BUILT_IN_USUBL_OVERFLOW:
9741 case BUILT_IN_USUBLL_OVERFLOW:
9742 ifn = IFN_SUB_OVERFLOW;
9743 break;
9744 case BUILT_IN_MUL_OVERFLOW:
9745 case BUILT_IN_SMUL_OVERFLOW:
9746 case BUILT_IN_SMULL_OVERFLOW:
9747 case BUILT_IN_SMULLL_OVERFLOW:
9748 case BUILT_IN_UMUL_OVERFLOW:
9749 case BUILT_IN_UMULL_OVERFLOW:
9750 case BUILT_IN_UMULLL_OVERFLOW:
9751 ifn = IFN_MUL_OVERFLOW;
9752 break;
9753 default:
9754 gcc_unreachable ();
9755 }
9756 tree ctype = build_complex_type (type);
9757 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9758 2, arg0, arg1);
9759 tree tgt = save_expr (call);
9760 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9761 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9762 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9763 tree store
9764 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9765 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9766 }
9767
9768 /* Fold a call to built-in function FNDECL with 0 arguments.
9769 This function returns NULL_TREE if no simplification was possible. */
9770
9771 static tree
9772 fold_builtin_0 (location_t loc, tree fndecl)
9773 {
9774 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9775 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9776 switch (fcode)
9777 {
9778 CASE_FLT_FN (BUILT_IN_INF):
9779 case BUILT_IN_INFD32:
9780 case BUILT_IN_INFD64:
9781 case BUILT_IN_INFD128:
9782 return fold_builtin_inf (loc, type, true);
9783
9784 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9785 return fold_builtin_inf (loc, type, false);
9786
9787 case BUILT_IN_CLASSIFY_TYPE:
9788 return fold_builtin_classify_type (NULL_TREE);
9789
9790 default:
9791 break;
9792 }
9793 return NULL_TREE;
9794 }
9795
9796 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9797 This function returns NULL_TREE if no simplification was possible. */
9798
9799 static tree
9800 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9801 {
9802 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9803 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9804 switch (fcode)
9805 {
9806 case BUILT_IN_CONSTANT_P:
9807 {
9808 tree val = fold_builtin_constant_p (arg0);
9809
9810 /* Gimplification will pull the CALL_EXPR for the builtin out of
9811 an if condition. When not optimizing, we'll not CSE it back.
9812 To avoid link error types of regressions, return false now. */
9813 if (!val && !optimize)
9814 val = integer_zero_node;
9815
9816 return val;
9817 }
9818
9819 case BUILT_IN_CLASSIFY_TYPE:
9820 return fold_builtin_classify_type (arg0);
9821
9822 case BUILT_IN_STRLEN:
9823 return fold_builtin_strlen (loc, type, arg0);
9824
9825 CASE_FLT_FN (BUILT_IN_FABS):
9826 case BUILT_IN_FABSD32:
9827 case BUILT_IN_FABSD64:
9828 case BUILT_IN_FABSD128:
9829 return fold_builtin_fabs (loc, arg0, type);
9830
9831 case BUILT_IN_ABS:
9832 case BUILT_IN_LABS:
9833 case BUILT_IN_LLABS:
9834 case BUILT_IN_IMAXABS:
9835 return fold_builtin_abs (loc, arg0, type);
9836
9837 CASE_FLT_FN (BUILT_IN_CONJ):
9838 if (validate_arg (arg0, COMPLEX_TYPE)
9839 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9840 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9841 break;
9842
9843 CASE_FLT_FN (BUILT_IN_CREAL):
9844 if (validate_arg (arg0, COMPLEX_TYPE)
9845 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9846 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9847 break;
9848
9849 CASE_FLT_FN (BUILT_IN_CIMAG):
9850 if (validate_arg (arg0, COMPLEX_TYPE)
9851 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9852 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9853 break;
9854
9855 CASE_FLT_FN (BUILT_IN_CCOS):
9856 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9857
9858 CASE_FLT_FN (BUILT_IN_CCOSH):
9859 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9860
9861 CASE_FLT_FN (BUILT_IN_CPROJ):
9862 return fold_builtin_cproj (loc, arg0, type);
9863
9864 CASE_FLT_FN (BUILT_IN_CSIN):
9865 if (validate_arg (arg0, COMPLEX_TYPE)
9866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9867 return do_mpc_arg1 (arg0, type, mpc_sin);
9868 break;
9869
9870 CASE_FLT_FN (BUILT_IN_CSINH):
9871 if (validate_arg (arg0, COMPLEX_TYPE)
9872 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9873 return do_mpc_arg1 (arg0, type, mpc_sinh);
9874 break;
9875
9876 CASE_FLT_FN (BUILT_IN_CTAN):
9877 if (validate_arg (arg0, COMPLEX_TYPE)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9879 return do_mpc_arg1 (arg0, type, mpc_tan);
9880 break;
9881
9882 CASE_FLT_FN (BUILT_IN_CTANH):
9883 if (validate_arg (arg0, COMPLEX_TYPE)
9884 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9885 return do_mpc_arg1 (arg0, type, mpc_tanh);
9886 break;
9887
9888 CASE_FLT_FN (BUILT_IN_CLOG):
9889 if (validate_arg (arg0, COMPLEX_TYPE)
9890 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9891 return do_mpc_arg1 (arg0, type, mpc_log);
9892 break;
9893
9894 CASE_FLT_FN (BUILT_IN_CSQRT):
9895 if (validate_arg (arg0, COMPLEX_TYPE)
9896 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9897 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9898 break;
9899
9900 CASE_FLT_FN (BUILT_IN_CASIN):
9901 if (validate_arg (arg0, COMPLEX_TYPE)
9902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9903 return do_mpc_arg1 (arg0, type, mpc_asin);
9904 break;
9905
9906 CASE_FLT_FN (BUILT_IN_CACOS):
9907 if (validate_arg (arg0, COMPLEX_TYPE)
9908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9909 return do_mpc_arg1 (arg0, type, mpc_acos);
9910 break;
9911
9912 CASE_FLT_FN (BUILT_IN_CATAN):
9913 if (validate_arg (arg0, COMPLEX_TYPE)
9914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9915 return do_mpc_arg1 (arg0, type, mpc_atan);
9916 break;
9917
9918 CASE_FLT_FN (BUILT_IN_CASINH):
9919 if (validate_arg (arg0, COMPLEX_TYPE)
9920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9921 return do_mpc_arg1 (arg0, type, mpc_asinh);
9922 break;
9923
9924 CASE_FLT_FN (BUILT_IN_CACOSH):
9925 if (validate_arg (arg0, COMPLEX_TYPE)
9926 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9927 return do_mpc_arg1 (arg0, type, mpc_acosh);
9928 break;
9929
9930 CASE_FLT_FN (BUILT_IN_CATANH):
9931 if (validate_arg (arg0, COMPLEX_TYPE)
9932 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9933 return do_mpc_arg1 (arg0, type, mpc_atanh);
9934 break;
9935
9936 CASE_FLT_FN (BUILT_IN_CABS):
9937 return fold_builtin_cabs (loc, arg0, type, fndecl);
9938
9939 CASE_FLT_FN (BUILT_IN_CARG):
9940 return fold_builtin_carg (loc, arg0, type);
9941
9942 CASE_FLT_FN (BUILT_IN_SQRT):
9943 return fold_builtin_sqrt (loc, arg0, type);
9944
9945 CASE_FLT_FN (BUILT_IN_CBRT):
9946 return fold_builtin_cbrt (loc, arg0, type);
9947
9948 CASE_FLT_FN (BUILT_IN_ASIN):
9949 if (validate_arg (arg0, REAL_TYPE))
9950 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9951 &dconstm1, &dconst1, true);
9952 break;
9953
9954 CASE_FLT_FN (BUILT_IN_ACOS):
9955 if (validate_arg (arg0, REAL_TYPE))
9956 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9957 &dconstm1, &dconst1, true);
9958 break;
9959
9960 CASE_FLT_FN (BUILT_IN_ATAN):
9961 if (validate_arg (arg0, REAL_TYPE))
9962 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9963 break;
9964
9965 CASE_FLT_FN (BUILT_IN_ASINH):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9968 break;
9969
9970 CASE_FLT_FN (BUILT_IN_ACOSH):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9973 &dconst1, NULL, true);
9974 break;
9975
9976 CASE_FLT_FN (BUILT_IN_ATANH):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9979 &dconstm1, &dconst1, false);
9980 break;
9981
9982 CASE_FLT_FN (BUILT_IN_SIN):
9983 if (validate_arg (arg0, REAL_TYPE))
9984 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9985 break;
9986
9987 CASE_FLT_FN (BUILT_IN_COS):
9988 return fold_builtin_cos (loc, arg0, type, fndecl);
9989
9990 CASE_FLT_FN (BUILT_IN_TAN):
9991 return fold_builtin_tan (arg0, type);
9992
9993 CASE_FLT_FN (BUILT_IN_CEXP):
9994 return fold_builtin_cexp (loc, arg0, type);
9995
9996 CASE_FLT_FN (BUILT_IN_CEXPI):
9997 if (validate_arg (arg0, REAL_TYPE))
9998 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9999 break;
10000
10001 CASE_FLT_FN (BUILT_IN_SINH):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10004 break;
10005
10006 CASE_FLT_FN (BUILT_IN_COSH):
10007 return fold_builtin_cosh (loc, arg0, type, fndecl);
10008
10009 CASE_FLT_FN (BUILT_IN_TANH):
10010 if (validate_arg (arg0, REAL_TYPE))
10011 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10012 break;
10013
10014 CASE_FLT_FN (BUILT_IN_ERF):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10017 break;
10018
10019 CASE_FLT_FN (BUILT_IN_ERFC):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10022 break;
10023
10024 CASE_FLT_FN (BUILT_IN_TGAMMA):
10025 if (validate_arg (arg0, REAL_TYPE))
10026 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10027 break;
10028
10029 CASE_FLT_FN (BUILT_IN_EXP):
10030 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10031
10032 CASE_FLT_FN (BUILT_IN_EXP2):
10033 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10034
10035 CASE_FLT_FN (BUILT_IN_EXP10):
10036 CASE_FLT_FN (BUILT_IN_POW10):
10037 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10038
10039 CASE_FLT_FN (BUILT_IN_EXPM1):
10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10042 break;
10043
10044 CASE_FLT_FN (BUILT_IN_LOG):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10047 break;
10048
10049 CASE_FLT_FN (BUILT_IN_LOG2):
10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10052 break;
10053
10054 CASE_FLT_FN (BUILT_IN_LOG10):
10055 if (validate_arg (arg0, REAL_TYPE))
10056 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10057 break;
10058
10059 CASE_FLT_FN (BUILT_IN_LOG1P):
10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10062 &dconstm1, NULL, false);
10063 break;
10064
10065 CASE_FLT_FN (BUILT_IN_J0):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10068 NULL, NULL, 0);
10069 break;
10070
10071 CASE_FLT_FN (BUILT_IN_J1):
10072 if (validate_arg (arg0, REAL_TYPE))
10073 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10074 NULL, NULL, 0);
10075 break;
10076
10077 CASE_FLT_FN (BUILT_IN_Y0):
10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10080 &dconst0, NULL, false);
10081 break;
10082
10083 CASE_FLT_FN (BUILT_IN_Y1):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10086 &dconst0, NULL, false);
10087 break;
10088
10089 CASE_FLT_FN (BUILT_IN_NAN):
10090 case BUILT_IN_NAND32:
10091 case BUILT_IN_NAND64:
10092 case BUILT_IN_NAND128:
10093 return fold_builtin_nan (arg0, type, true);
10094
10095 CASE_FLT_FN (BUILT_IN_NANS):
10096 return fold_builtin_nan (arg0, type, false);
10097
10098 CASE_FLT_FN (BUILT_IN_FLOOR):
10099 return fold_builtin_floor (loc, fndecl, arg0);
10100
10101 CASE_FLT_FN (BUILT_IN_CEIL):
10102 return fold_builtin_ceil (loc, fndecl, arg0);
10103
10104 CASE_FLT_FN (BUILT_IN_TRUNC):
10105 return fold_builtin_trunc (loc, fndecl, arg0);
10106
10107 CASE_FLT_FN (BUILT_IN_ROUND):
10108 return fold_builtin_round (loc, fndecl, arg0);
10109
10110 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10111 CASE_FLT_FN (BUILT_IN_RINT):
10112 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10113
10114 CASE_FLT_FN (BUILT_IN_ICEIL):
10115 CASE_FLT_FN (BUILT_IN_LCEIL):
10116 CASE_FLT_FN (BUILT_IN_LLCEIL):
10117 CASE_FLT_FN (BUILT_IN_LFLOOR):
10118 CASE_FLT_FN (BUILT_IN_IFLOOR):
10119 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10120 CASE_FLT_FN (BUILT_IN_IROUND):
10121 CASE_FLT_FN (BUILT_IN_LROUND):
10122 CASE_FLT_FN (BUILT_IN_LLROUND):
10123 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10124
10125 CASE_FLT_FN (BUILT_IN_IRINT):
10126 CASE_FLT_FN (BUILT_IN_LRINT):
10127 CASE_FLT_FN (BUILT_IN_LLRINT):
10128 return fold_fixed_mathfn (loc, fndecl, arg0);
10129
10130 case BUILT_IN_BSWAP16:
10131 case BUILT_IN_BSWAP32:
10132 case BUILT_IN_BSWAP64:
10133 return fold_builtin_bswap (fndecl, arg0);
10134
10135 CASE_INT_FN (BUILT_IN_FFS):
10136 CASE_INT_FN (BUILT_IN_CLZ):
10137 CASE_INT_FN (BUILT_IN_CTZ):
10138 CASE_INT_FN (BUILT_IN_CLRSB):
10139 CASE_INT_FN (BUILT_IN_POPCOUNT):
10140 CASE_INT_FN (BUILT_IN_PARITY):
10141 return fold_builtin_bitop (fndecl, arg0);
10142
10143 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10144 return fold_builtin_signbit (loc, arg0, type);
10145
10146 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10147 return fold_builtin_significand (loc, arg0, type);
10148
10149 CASE_FLT_FN (BUILT_IN_ILOGB):
10150 CASE_FLT_FN (BUILT_IN_LOGB):
10151 return fold_builtin_logb (loc, arg0, type);
10152
10153 case BUILT_IN_ISASCII:
10154 return fold_builtin_isascii (loc, arg0);
10155
10156 case BUILT_IN_TOASCII:
10157 return fold_builtin_toascii (loc, arg0);
10158
10159 case BUILT_IN_ISDIGIT:
10160 return fold_builtin_isdigit (loc, arg0);
10161
10162 CASE_FLT_FN (BUILT_IN_FINITE):
10163 case BUILT_IN_FINITED32:
10164 case BUILT_IN_FINITED64:
10165 case BUILT_IN_FINITED128:
10166 case BUILT_IN_ISFINITE:
10167 {
10168 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10169 if (ret)
10170 return ret;
10171 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10172 }
10173
10174 CASE_FLT_FN (BUILT_IN_ISINF):
10175 case BUILT_IN_ISINFD32:
10176 case BUILT_IN_ISINFD64:
10177 case BUILT_IN_ISINFD128:
10178 {
10179 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10180 if (ret)
10181 return ret;
10182 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10183 }
10184
10185 case BUILT_IN_ISNORMAL:
10186 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10187
10188 case BUILT_IN_ISINF_SIGN:
10189 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10190
10191 CASE_FLT_FN (BUILT_IN_ISNAN):
10192 case BUILT_IN_ISNAND32:
10193 case BUILT_IN_ISNAND64:
10194 case BUILT_IN_ISNAND128:
10195 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10196
10197 case BUILT_IN_FREE:
10198 if (integer_zerop (arg0))
10199 return build_empty_stmt (loc);
10200 break;
10201
10202 default:
10203 break;
10204 }
10205
10206 return NULL_TREE;
10207
10208 }
10209
10210 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10211 This function returns NULL_TREE if no simplification was possible. */
10212
10213 static tree
10214 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10215 {
10216 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10217 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10218
10219 switch (fcode)
10220 {
10221 CASE_FLT_FN (BUILT_IN_JN):
10222 if (validate_arg (arg0, INTEGER_TYPE)
10223 && validate_arg (arg1, REAL_TYPE))
10224 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10225 break;
10226
10227 CASE_FLT_FN (BUILT_IN_YN):
10228 if (validate_arg (arg0, INTEGER_TYPE)
10229 && validate_arg (arg1, REAL_TYPE))
10230 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10231 &dconst0, false);
10232 break;
10233
10234 CASE_FLT_FN (BUILT_IN_DREM):
10235 CASE_FLT_FN (BUILT_IN_REMAINDER):
10236 if (validate_arg (arg0, REAL_TYPE)
10237 && validate_arg (arg1, REAL_TYPE))
10238 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10239 break;
10240
10241 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10242 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10243 if (validate_arg (arg0, REAL_TYPE)
10244 && validate_arg (arg1, POINTER_TYPE))
10245 return do_mpfr_lgamma_r (arg0, arg1, type);
10246 break;
10247
10248 CASE_FLT_FN (BUILT_IN_ATAN2):
10249 if (validate_arg (arg0, REAL_TYPE)
10250 && validate_arg (arg1, REAL_TYPE))
10251 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10252 break;
10253
10254 CASE_FLT_FN (BUILT_IN_FDIM):
10255 if (validate_arg (arg0, REAL_TYPE)
10256 && validate_arg (arg1, REAL_TYPE))
10257 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10258 break;
10259
10260 CASE_FLT_FN (BUILT_IN_HYPOT):
10261 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10262
10263 CASE_FLT_FN (BUILT_IN_CPOW):
10264 if (validate_arg (arg0, COMPLEX_TYPE)
10265 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10266 && validate_arg (arg1, COMPLEX_TYPE)
10267 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10268 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10269 break;
10270
10271 CASE_FLT_FN (BUILT_IN_LDEXP):
10272 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10273 CASE_FLT_FN (BUILT_IN_SCALBN):
10274 CASE_FLT_FN (BUILT_IN_SCALBLN):
10275 return fold_builtin_load_exponent (loc, arg0, arg1,
10276 type, /*ldexp=*/false);
10277
10278 CASE_FLT_FN (BUILT_IN_FREXP):
10279 return fold_builtin_frexp (loc, arg0, arg1, type);
10280
10281 CASE_FLT_FN (BUILT_IN_MODF):
10282 return fold_builtin_modf (loc, arg0, arg1, type);
10283
10284 case BUILT_IN_STRSTR:
10285 return fold_builtin_strstr (loc, arg0, arg1, type);
10286
10287 case BUILT_IN_STRSPN:
10288 return fold_builtin_strspn (loc, arg0, arg1);
10289
10290 case BUILT_IN_STRCSPN:
10291 return fold_builtin_strcspn (loc, arg0, arg1);
10292
10293 case BUILT_IN_STRCHR:
10294 case BUILT_IN_INDEX:
10295 return fold_builtin_strchr (loc, arg0, arg1, type);
10296
10297 case BUILT_IN_STRRCHR:
10298 case BUILT_IN_RINDEX:
10299 return fold_builtin_strrchr (loc, arg0, arg1, type);
10300
10301 case BUILT_IN_STRCMP:
10302 return fold_builtin_strcmp (loc, arg0, arg1);
10303
10304 case BUILT_IN_STRPBRK:
10305 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10306
10307 case BUILT_IN_EXPECT:
10308 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10309
10310 CASE_FLT_FN (BUILT_IN_POW):
10311 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10312
10313 CASE_FLT_FN (BUILT_IN_POWI):
10314 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10315
10316 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10317 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10318
10319 CASE_FLT_FN (BUILT_IN_FMIN):
10320 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10321
10322 CASE_FLT_FN (BUILT_IN_FMAX):
10323 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10324
10325 case BUILT_IN_ISGREATER:
10326 return fold_builtin_unordered_cmp (loc, fndecl,
10327 arg0, arg1, UNLE_EXPR, LE_EXPR);
10328 case BUILT_IN_ISGREATEREQUAL:
10329 return fold_builtin_unordered_cmp (loc, fndecl,
10330 arg0, arg1, UNLT_EXPR, LT_EXPR);
10331 case BUILT_IN_ISLESS:
10332 return fold_builtin_unordered_cmp (loc, fndecl,
10333 arg0, arg1, UNGE_EXPR, GE_EXPR);
10334 case BUILT_IN_ISLESSEQUAL:
10335 return fold_builtin_unordered_cmp (loc, fndecl,
10336 arg0, arg1, UNGT_EXPR, GT_EXPR);
10337 case BUILT_IN_ISLESSGREATER:
10338 return fold_builtin_unordered_cmp (loc, fndecl,
10339 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10340 case BUILT_IN_ISUNORDERED:
10341 return fold_builtin_unordered_cmp (loc, fndecl,
10342 arg0, arg1, UNORDERED_EXPR,
10343 NOP_EXPR);
10344
10345 /* We do the folding for va_start in the expander. */
10346 case BUILT_IN_VA_START:
10347 break;
10348
10349 case BUILT_IN_OBJECT_SIZE:
10350 return fold_builtin_object_size (arg0, arg1);
10351
10352 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10353 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10354
10355 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10356 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10357
10358 default:
10359 break;
10360 }
10361 return NULL_TREE;
10362 }
10363
10364 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10365 and ARG2.
10366 This function returns NULL_TREE if no simplification was possible. */
10367
10368 static tree
10369 fold_builtin_3 (location_t loc, tree fndecl,
10370 tree arg0, tree arg1, tree arg2)
10371 {
10372 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10373 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10374 switch (fcode)
10375 {
10376
10377 CASE_FLT_FN (BUILT_IN_SINCOS):
10378 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10379
10380 CASE_FLT_FN (BUILT_IN_FMA):
10381 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10382 break;
10383
10384 CASE_FLT_FN (BUILT_IN_REMQUO):
10385 if (validate_arg (arg0, REAL_TYPE)
10386 && validate_arg (arg1, REAL_TYPE)
10387 && validate_arg (arg2, POINTER_TYPE))
10388 return do_mpfr_remquo (arg0, arg1, arg2);
10389 break;
10390
10391 case BUILT_IN_STRNCMP:
10392 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10393
10394 case BUILT_IN_MEMCHR:
10395 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10396
10397 case BUILT_IN_BCMP:
10398 case BUILT_IN_MEMCMP:
10399 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10400
10401 case BUILT_IN_EXPECT:
10402 return fold_builtin_expect (loc, arg0, arg1, arg2);
10403
10404 case BUILT_IN_ADD_OVERFLOW:
10405 case BUILT_IN_SUB_OVERFLOW:
10406 case BUILT_IN_MUL_OVERFLOW:
10407 case BUILT_IN_SADD_OVERFLOW:
10408 case BUILT_IN_SADDL_OVERFLOW:
10409 case BUILT_IN_SADDLL_OVERFLOW:
10410 case BUILT_IN_SSUB_OVERFLOW:
10411 case BUILT_IN_SSUBL_OVERFLOW:
10412 case BUILT_IN_SSUBLL_OVERFLOW:
10413 case BUILT_IN_SMUL_OVERFLOW:
10414 case BUILT_IN_SMULL_OVERFLOW:
10415 case BUILT_IN_SMULLL_OVERFLOW:
10416 case BUILT_IN_UADD_OVERFLOW:
10417 case BUILT_IN_UADDL_OVERFLOW:
10418 case BUILT_IN_UADDLL_OVERFLOW:
10419 case BUILT_IN_USUB_OVERFLOW:
10420 case BUILT_IN_USUBL_OVERFLOW:
10421 case BUILT_IN_USUBLL_OVERFLOW:
10422 case BUILT_IN_UMUL_OVERFLOW:
10423 case BUILT_IN_UMULL_OVERFLOW:
10424 case BUILT_IN_UMULLL_OVERFLOW:
10425 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10426
10427 default:
10428 break;
10429 }
10430 return NULL_TREE;
10431 }
10432
10433 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10434 arguments. IGNORE is true if the result of the
10435 function call is ignored. This function returns NULL_TREE if no
10436 simplification was possible. */
10437
10438 tree
10439 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10440 {
10441 tree ret = NULL_TREE;
10442
10443 switch (nargs)
10444 {
10445 case 0:
10446 ret = fold_builtin_0 (loc, fndecl);
10447 break;
10448 case 1:
10449 ret = fold_builtin_1 (loc, fndecl, args[0]);
10450 break;
10451 case 2:
10452 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10453 break;
10454 case 3:
10455 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10456 break;
10457 default:
10458 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10459 break;
10460 }
10461 if (ret)
10462 {
10463 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10464 SET_EXPR_LOCATION (ret, loc);
10465 TREE_NO_WARNING (ret) = 1;
10466 return ret;
10467 }
10468 return NULL_TREE;
10469 }
10470
10471 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10472 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10473 of arguments in ARGS to be omitted. OLDNARGS is the number of
10474 elements in ARGS. */
10475
10476 static tree
10477 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10478 int skip, tree fndecl, int n, va_list newargs)
10479 {
10480 int nargs = oldnargs - skip + n;
10481 tree *buffer;
10482
10483 if (n > 0)
10484 {
10485 int i, j;
10486
10487 buffer = XALLOCAVEC (tree, nargs);
10488 for (i = 0; i < n; i++)
10489 buffer[i] = va_arg (newargs, tree);
10490 for (j = skip; j < oldnargs; j++, i++)
10491 buffer[i] = args[j];
10492 }
10493 else
10494 buffer = args + skip;
10495
10496 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10497 }
10498
10499 /* Return true if FNDECL shouldn't be folded right now.
10500 If a built-in function has an inline attribute always_inline
10501 wrapper, defer folding it after always_inline functions have
10502 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10503 might not be performed. */
10504
10505 bool
10506 avoid_folding_inline_builtin (tree fndecl)
10507 {
10508 return (DECL_DECLARED_INLINE_P (fndecl)
10509 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10510 && cfun
10511 && !cfun->always_inline_functions_inlined
10512 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10513 }
10514
10515 /* A wrapper function for builtin folding that prevents warnings for
10516 "statement without effect" and the like, caused by removing the
10517 call node earlier than the warning is generated. */
10518
10519 tree
10520 fold_call_expr (location_t loc, tree exp, bool ignore)
10521 {
10522 tree ret = NULL_TREE;
10523 tree fndecl = get_callee_fndecl (exp);
10524 if (fndecl
10525 && TREE_CODE (fndecl) == FUNCTION_DECL
10526 && DECL_BUILT_IN (fndecl)
10527 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10528 yet. Defer folding until we see all the arguments
10529 (after inlining). */
10530 && !CALL_EXPR_VA_ARG_PACK (exp))
10531 {
10532 int nargs = call_expr_nargs (exp);
10533
10534 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10535 instead last argument is __builtin_va_arg_pack (). Defer folding
10536 even in that case, until arguments are finalized. */
10537 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10538 {
10539 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10540 if (fndecl2
10541 && TREE_CODE (fndecl2) == FUNCTION_DECL
10542 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10543 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10544 return NULL_TREE;
10545 }
10546
10547 if (avoid_folding_inline_builtin (fndecl))
10548 return NULL_TREE;
10549
10550 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10551 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10552 CALL_EXPR_ARGP (exp), ignore);
10553 else
10554 {
10555 tree *args = CALL_EXPR_ARGP (exp);
10556 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10557 if (ret)
10558 return ret;
10559 }
10560 }
10561 return NULL_TREE;
10562 }
10563
10564 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10565 N arguments are passed in the array ARGARRAY. Return a folded
10566 expression or NULL_TREE if no simplification was possible. */
10567
10568 tree
10569 fold_builtin_call_array (location_t loc, tree,
10570 tree fn,
10571 int n,
10572 tree *argarray)
10573 {
10574 if (TREE_CODE (fn) != ADDR_EXPR)
10575 return NULL_TREE;
10576
10577 tree fndecl = TREE_OPERAND (fn, 0);
10578 if (TREE_CODE (fndecl) == FUNCTION_DECL
10579 && DECL_BUILT_IN (fndecl))
10580 {
10581 /* If last argument is __builtin_va_arg_pack (), arguments to this
10582 function are not finalized yet. Defer folding until they are. */
10583 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10584 {
10585 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10586 if (fndecl2
10587 && TREE_CODE (fndecl2) == FUNCTION_DECL
10588 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10589 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10590 return NULL_TREE;
10591 }
10592 if (avoid_folding_inline_builtin (fndecl))
10593 return NULL_TREE;
10594 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10595 return targetm.fold_builtin (fndecl, n, argarray, false);
10596 else
10597 return fold_builtin_n (loc, fndecl, argarray, n, false);
10598 }
10599
10600 return NULL_TREE;
10601 }
10602
10603 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10604 along with N new arguments specified as the "..." parameters. SKIP
10605 is the number of arguments in EXP to be omitted. This function is used
10606 to do varargs-to-varargs transformations. */
10607
10608 static tree
10609 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10610 {
10611 va_list ap;
10612 tree t;
10613
10614 va_start (ap, n);
10615 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10616 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10617 va_end (ap);
10618
10619 return t;
10620 }
10621
10622 /* Validate a single argument ARG against a tree code CODE representing
10623 a type. */
10624
10625 static bool
10626 validate_arg (const_tree arg, enum tree_code code)
10627 {
10628 if (!arg)
10629 return false;
10630 else if (code == POINTER_TYPE)
10631 return POINTER_TYPE_P (TREE_TYPE (arg));
10632 else if (code == INTEGER_TYPE)
10633 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10634 return code == TREE_CODE (TREE_TYPE (arg));
10635 }
10636
10637 /* This function validates the types of a function call argument list
10638 against a specified list of tree_codes. If the last specifier is a 0,
10639 that represents an ellipses, otherwise the last specifier must be a
10640 VOID_TYPE.
10641
10642 This is the GIMPLE version of validate_arglist. Eventually we want to
10643 completely convert builtins.c to work from GIMPLEs and the tree based
10644 validate_arglist will then be removed. */
10645
10646 bool
10647 validate_gimple_arglist (const gcall *call, ...)
10648 {
10649 enum tree_code code;
10650 bool res = 0;
10651 va_list ap;
10652 const_tree arg;
10653 size_t i;
10654
10655 va_start (ap, call);
10656 i = 0;
10657
10658 do
10659 {
10660 code = (enum tree_code) va_arg (ap, int);
10661 switch (code)
10662 {
10663 case 0:
10664 /* This signifies an ellipses, any further arguments are all ok. */
10665 res = true;
10666 goto end;
10667 case VOID_TYPE:
10668 /* This signifies an endlink, if no arguments remain, return
10669 true, otherwise return false. */
10670 res = (i == gimple_call_num_args (call));
10671 goto end;
10672 default:
10673 /* If no parameters remain or the parameter's code does not
10674 match the specified code, return false. Otherwise continue
10675 checking any remaining arguments. */
10676 arg = gimple_call_arg (call, i++);
10677 if (!validate_arg (arg, code))
10678 goto end;
10679 break;
10680 }
10681 }
10682 while (1);
10683
10684 /* We need gotos here since we can only have one VA_CLOSE in a
10685 function. */
10686 end: ;
10687 va_end (ap);
10688
10689 return res;
10690 }
10691
10692 /* Default target-specific builtin expander that does nothing. */
10693
10694 rtx
10695 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10696 rtx target ATTRIBUTE_UNUSED,
10697 rtx subtarget ATTRIBUTE_UNUSED,
10698 machine_mode mode ATTRIBUTE_UNUSED,
10699 int ignore ATTRIBUTE_UNUSED)
10700 {
10701 return NULL_RTX;
10702 }
10703
10704 /* Returns true is EXP represents data that would potentially reside
10705 in a readonly section. */
10706
10707 bool
10708 readonly_data_expr (tree exp)
10709 {
10710 STRIP_NOPS (exp);
10711
10712 if (TREE_CODE (exp) != ADDR_EXPR)
10713 return false;
10714
10715 exp = get_base_address (TREE_OPERAND (exp, 0));
10716 if (!exp)
10717 return false;
10718
10719 /* Make sure we call decl_readonly_section only for trees it
10720 can handle (since it returns true for everything it doesn't
10721 understand). */
10722 if (TREE_CODE (exp) == STRING_CST
10723 || TREE_CODE (exp) == CONSTRUCTOR
10724 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10725 return decl_readonly_section (exp, 0);
10726 else
10727 return false;
10728 }
10729
10730 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10731 to the call, and TYPE is its return type.
10732
10733 Return NULL_TREE if no simplification was possible, otherwise return the
10734 simplified form of the call as a tree.
10735
10736 The simplified form may be a constant or other expression which
10737 computes the same value, but in a more efficient manner (including
10738 calls to other builtin functions).
10739
10740 The call may contain arguments which need to be evaluated, but
10741 which are not useful to determine the result of the call. In
10742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10743 COMPOUND_EXPR will be an argument which must be evaluated.
10744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10745 COMPOUND_EXPR in the chain will contain the tree for the simplified
10746 form of the builtin function call. */
10747
10748 static tree
10749 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10750 {
10751 if (!validate_arg (s1, POINTER_TYPE)
10752 || !validate_arg (s2, POINTER_TYPE))
10753 return NULL_TREE;
10754 else
10755 {
10756 tree fn;
10757 const char *p1, *p2;
10758
10759 p2 = c_getstr (s2);
10760 if (p2 == NULL)
10761 return NULL_TREE;
10762
10763 p1 = c_getstr (s1);
10764 if (p1 != NULL)
10765 {
10766 const char *r = strstr (p1, p2);
10767 tree tem;
10768
10769 if (r == NULL)
10770 return build_int_cst (TREE_TYPE (s1), 0);
10771
10772 /* Return an offset into the constant string argument. */
10773 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10774 return fold_convert_loc (loc, type, tem);
10775 }
10776
10777 /* The argument is const char *, and the result is char *, so we need
10778 a type conversion here to avoid a warning. */
10779 if (p2[0] == '\0')
10780 return fold_convert_loc (loc, type, s1);
10781
10782 if (p2[1] != '\0')
10783 return NULL_TREE;
10784
10785 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10786 if (!fn)
10787 return NULL_TREE;
10788
10789 /* New argument list transforming strstr(s1, s2) to
10790 strchr(s1, s2[0]). */
10791 return build_call_expr_loc (loc, fn, 2, s1,
10792 build_int_cst (integer_type_node, p2[0]));
10793 }
10794 }
10795
10796 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10797 the call, and TYPE is its return type.
10798
10799 Return NULL_TREE if no simplification was possible, otherwise return the
10800 simplified form of the call as a tree.
10801
10802 The simplified form may be a constant or other expression which
10803 computes the same value, but in a more efficient manner (including
10804 calls to other builtin functions).
10805
10806 The call may contain arguments which need to be evaluated, but
10807 which are not useful to determine the result of the call. In
10808 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10809 COMPOUND_EXPR will be an argument which must be evaluated.
10810 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10811 COMPOUND_EXPR in the chain will contain the tree for the simplified
10812 form of the builtin function call. */
10813
10814 static tree
10815 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10816 {
10817 if (!validate_arg (s1, POINTER_TYPE)
10818 || !validate_arg (s2, INTEGER_TYPE))
10819 return NULL_TREE;
10820 else
10821 {
10822 const char *p1;
10823
10824 if (TREE_CODE (s2) != INTEGER_CST)
10825 return NULL_TREE;
10826
10827 p1 = c_getstr (s1);
10828 if (p1 != NULL)
10829 {
10830 char c;
10831 const char *r;
10832 tree tem;
10833
10834 if (target_char_cast (s2, &c))
10835 return NULL_TREE;
10836
10837 r = strchr (p1, c);
10838
10839 if (r == NULL)
10840 return build_int_cst (TREE_TYPE (s1), 0);
10841
10842 /* Return an offset into the constant string argument. */
10843 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10844 return fold_convert_loc (loc, type, tem);
10845 }
10846 return NULL_TREE;
10847 }
10848 }
10849
10850 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10851 the call, and TYPE is its return type.
10852
10853 Return NULL_TREE if no simplification was possible, otherwise return the
10854 simplified form of the call as a tree.
10855
10856 The simplified form may be a constant or other expression which
10857 computes the same value, but in a more efficient manner (including
10858 calls to other builtin functions).
10859
10860 The call may contain arguments which need to be evaluated, but
10861 which are not useful to determine the result of the call. In
10862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10863 COMPOUND_EXPR will be an argument which must be evaluated.
10864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10865 COMPOUND_EXPR in the chain will contain the tree for the simplified
10866 form of the builtin function call. */
10867
10868 static tree
10869 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10870 {
10871 if (!validate_arg (s1, POINTER_TYPE)
10872 || !validate_arg (s2, INTEGER_TYPE))
10873 return NULL_TREE;
10874 else
10875 {
10876 tree fn;
10877 const char *p1;
10878
10879 if (TREE_CODE (s2) != INTEGER_CST)
10880 return NULL_TREE;
10881
10882 p1 = c_getstr (s1);
10883 if (p1 != NULL)
10884 {
10885 char c;
10886 const char *r;
10887 tree tem;
10888
10889 if (target_char_cast (s2, &c))
10890 return NULL_TREE;
10891
10892 r = strrchr (p1, c);
10893
10894 if (r == NULL)
10895 return build_int_cst (TREE_TYPE (s1), 0);
10896
10897 /* Return an offset into the constant string argument. */
10898 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10899 return fold_convert_loc (loc, type, tem);
10900 }
10901
10902 if (! integer_zerop (s2))
10903 return NULL_TREE;
10904
10905 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10906 if (!fn)
10907 return NULL_TREE;
10908
10909 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10910 return build_call_expr_loc (loc, fn, 2, s1, s2);
10911 }
10912 }
10913
10914 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10915 to the call, and TYPE is its return type.
10916
10917 Return NULL_TREE if no simplification was possible, otherwise return the
10918 simplified form of the call as a tree.
10919
10920 The simplified form may be a constant or other expression which
10921 computes the same value, but in a more efficient manner (including
10922 calls to other builtin functions).
10923
10924 The call may contain arguments which need to be evaluated, but
10925 which are not useful to determine the result of the call. In
10926 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10927 COMPOUND_EXPR will be an argument which must be evaluated.
10928 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10929 COMPOUND_EXPR in the chain will contain the tree for the simplified
10930 form of the builtin function call. */
10931
10932 static tree
10933 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10934 {
10935 if (!validate_arg (s1, POINTER_TYPE)
10936 || !validate_arg (s2, POINTER_TYPE))
10937 return NULL_TREE;
10938 else
10939 {
10940 tree fn;
10941 const char *p1, *p2;
10942
10943 p2 = c_getstr (s2);
10944 if (p2 == NULL)
10945 return NULL_TREE;
10946
10947 p1 = c_getstr (s1);
10948 if (p1 != NULL)
10949 {
10950 const char *r = strpbrk (p1, p2);
10951 tree tem;
10952
10953 if (r == NULL)
10954 return build_int_cst (TREE_TYPE (s1), 0);
10955
10956 /* Return an offset into the constant string argument. */
10957 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10958 return fold_convert_loc (loc, type, tem);
10959 }
10960
10961 if (p2[0] == '\0')
10962 /* strpbrk(x, "") == NULL.
10963 Evaluate and ignore s1 in case it had side-effects. */
10964 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10965
10966 if (p2[1] != '\0')
10967 return NULL_TREE; /* Really call strpbrk. */
10968
10969 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10970 if (!fn)
10971 return NULL_TREE;
10972
10973 /* New argument list transforming strpbrk(s1, s2) to
10974 strchr(s1, s2[0]). */
10975 return build_call_expr_loc (loc, fn, 2, s1,
10976 build_int_cst (integer_type_node, p2[0]));
10977 }
10978 }
10979
10980 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10981 to the call.
10982
10983 Return NULL_TREE if no simplification was possible, otherwise return the
10984 simplified form of the call as a tree.
10985
10986 The simplified form may be a constant or other expression which
10987 computes the same value, but in a more efficient manner (including
10988 calls to other builtin functions).
10989
10990 The call may contain arguments which need to be evaluated, but
10991 which are not useful to determine the result of the call. In
10992 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10993 COMPOUND_EXPR will be an argument which must be evaluated.
10994 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10995 COMPOUND_EXPR in the chain will contain the tree for the simplified
10996 form of the builtin function call. */
10997
10998 static tree
10999 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11000 {
11001 if (!validate_arg (s1, POINTER_TYPE)
11002 || !validate_arg (s2, POINTER_TYPE))
11003 return NULL_TREE;
11004 else
11005 {
11006 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11007
11008 /* If both arguments are constants, evaluate at compile-time. */
11009 if (p1 && p2)
11010 {
11011 const size_t r = strspn (p1, p2);
11012 return build_int_cst (size_type_node, r);
11013 }
11014
11015 /* If either argument is "", return NULL_TREE. */
11016 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11017 /* Evaluate and ignore both arguments in case either one has
11018 side-effects. */
11019 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11020 s1, s2);
11021 return NULL_TREE;
11022 }
11023 }
11024
11025 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11026 to the call.
11027
11028 Return NULL_TREE if no simplification was possible, otherwise return the
11029 simplified form of the call as a tree.
11030
11031 The simplified form may be a constant or other expression which
11032 computes the same value, but in a more efficient manner (including
11033 calls to other builtin functions).
11034
11035 The call may contain arguments which need to be evaluated, but
11036 which are not useful to determine the result of the call. In
11037 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11038 COMPOUND_EXPR will be an argument which must be evaluated.
11039 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11040 COMPOUND_EXPR in the chain will contain the tree for the simplified
11041 form of the builtin function call. */
11042
11043 static tree
11044 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11045 {
11046 if (!validate_arg (s1, POINTER_TYPE)
11047 || !validate_arg (s2, POINTER_TYPE))
11048 return NULL_TREE;
11049 else
11050 {
11051 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11052
11053 /* If both arguments are constants, evaluate at compile-time. */
11054 if (p1 && p2)
11055 {
11056 const size_t r = strcspn (p1, p2);
11057 return build_int_cst (size_type_node, r);
11058 }
11059
11060 /* If the first argument is "", return NULL_TREE. */
11061 if (p1 && *p1 == '\0')
11062 {
11063 /* Evaluate and ignore argument s2 in case it has
11064 side-effects. */
11065 return omit_one_operand_loc (loc, size_type_node,
11066 size_zero_node, s2);
11067 }
11068
11069 /* If the second argument is "", return __builtin_strlen(s1). */
11070 if (p2 && *p2 == '\0')
11071 {
11072 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11073
11074 /* If the replacement _DECL isn't initialized, don't do the
11075 transformation. */
11076 if (!fn)
11077 return NULL_TREE;
11078
11079 return build_call_expr_loc (loc, fn, 1, s1);
11080 }
11081 return NULL_TREE;
11082 }
11083 }
11084
11085 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11086 produced. False otherwise. This is done so that we don't output the error
11087 or warning twice or three times. */
11088
11089 bool
11090 fold_builtin_next_arg (tree exp, bool va_start_p)
11091 {
11092 tree fntype = TREE_TYPE (current_function_decl);
11093 int nargs = call_expr_nargs (exp);
11094 tree arg;
11095 /* There is good chance the current input_location points inside the
11096 definition of the va_start macro (perhaps on the token for
11097 builtin) in a system header, so warnings will not be emitted.
11098 Use the location in real source code. */
11099 source_location current_location =
11100 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11101 NULL);
11102
11103 if (!stdarg_p (fntype))
11104 {
11105 error ("%<va_start%> used in function with fixed args");
11106 return true;
11107 }
11108
11109 if (va_start_p)
11110 {
11111 if (va_start_p && (nargs != 2))
11112 {
11113 error ("wrong number of arguments to function %<va_start%>");
11114 return true;
11115 }
11116 arg = CALL_EXPR_ARG (exp, 1);
11117 }
11118 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11119 when we checked the arguments and if needed issued a warning. */
11120 else
11121 {
11122 if (nargs == 0)
11123 {
11124 /* Evidently an out of date version of <stdarg.h>; can't validate
11125 va_start's second argument, but can still work as intended. */
11126 warning_at (current_location,
11127 OPT_Wvarargs,
11128 "%<__builtin_next_arg%> called without an argument");
11129 return true;
11130 }
11131 else if (nargs > 1)
11132 {
11133 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11134 return true;
11135 }
11136 arg = CALL_EXPR_ARG (exp, 0);
11137 }
11138
11139 if (TREE_CODE (arg) == SSA_NAME)
11140 arg = SSA_NAME_VAR (arg);
11141
11142 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11143 or __builtin_next_arg (0) the first time we see it, after checking
11144 the arguments and if needed issuing a warning. */
11145 if (!integer_zerop (arg))
11146 {
11147 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11148
11149 /* Strip off all nops for the sake of the comparison. This
11150 is not quite the same as STRIP_NOPS. It does more.
11151 We must also strip off INDIRECT_EXPR for C++ reference
11152 parameters. */
11153 while (CONVERT_EXPR_P (arg)
11154 || TREE_CODE (arg) == INDIRECT_REF)
11155 arg = TREE_OPERAND (arg, 0);
11156 if (arg != last_parm)
11157 {
11158 /* FIXME: Sometimes with the tree optimizers we can get the
11159 not the last argument even though the user used the last
11160 argument. We just warn and set the arg to be the last
11161 argument so that we will get wrong-code because of
11162 it. */
11163 warning_at (current_location,
11164 OPT_Wvarargs,
11165 "second parameter of %<va_start%> not last named argument");
11166 }
11167
11168 /* Undefined by C99 7.15.1.4p4 (va_start):
11169 "If the parameter parmN is declared with the register storage
11170 class, with a function or array type, or with a type that is
11171 not compatible with the type that results after application of
11172 the default argument promotions, the behavior is undefined."
11173 */
11174 else if (DECL_REGISTER (arg))
11175 {
11176 warning_at (current_location,
11177 OPT_Wvarargs,
11178 "undefined behaviour when second parameter of "
11179 "%<va_start%> is declared with %<register%> storage");
11180 }
11181
11182 /* We want to verify the second parameter just once before the tree
11183 optimizers are run and then avoid keeping it in the tree,
11184 as otherwise we could warn even for correct code like:
11185 void foo (int i, ...)
11186 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11187 if (va_start_p)
11188 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11189 else
11190 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11191 }
11192 return false;
11193 }
11194
11195
11196 /* Expand a call EXP to __builtin_object_size. */
11197
11198 static rtx
11199 expand_builtin_object_size (tree exp)
11200 {
11201 tree ost;
11202 int object_size_type;
11203 tree fndecl = get_callee_fndecl (exp);
11204
11205 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11206 {
11207 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11208 exp, fndecl);
11209 expand_builtin_trap ();
11210 return const0_rtx;
11211 }
11212
11213 ost = CALL_EXPR_ARG (exp, 1);
11214 STRIP_NOPS (ost);
11215
11216 if (TREE_CODE (ost) != INTEGER_CST
11217 || tree_int_cst_sgn (ost) < 0
11218 || compare_tree_int (ost, 3) > 0)
11219 {
11220 error ("%Klast argument of %D is not integer constant between 0 and 3",
11221 exp, fndecl);
11222 expand_builtin_trap ();
11223 return const0_rtx;
11224 }
11225
11226 object_size_type = tree_to_shwi (ost);
11227
11228 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11229 }
11230
11231 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11232 FCODE is the BUILT_IN_* to use.
11233 Return NULL_RTX if we failed; the caller should emit a normal call,
11234 otherwise try to get the result in TARGET, if convenient (and in
11235 mode MODE if that's convenient). */
11236
11237 static rtx
11238 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11239 enum built_in_function fcode)
11240 {
11241 tree dest, src, len, size;
11242
11243 if (!validate_arglist (exp,
11244 POINTER_TYPE,
11245 fcode == BUILT_IN_MEMSET_CHK
11246 ? INTEGER_TYPE : POINTER_TYPE,
11247 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11248 return NULL_RTX;
11249
11250 dest = CALL_EXPR_ARG (exp, 0);
11251 src = CALL_EXPR_ARG (exp, 1);
11252 len = CALL_EXPR_ARG (exp, 2);
11253 size = CALL_EXPR_ARG (exp, 3);
11254
11255 if (! tree_fits_uhwi_p (size))
11256 return NULL_RTX;
11257
11258 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11259 {
11260 tree fn;
11261
11262 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11263 {
11264 warning_at (tree_nonartificial_location (exp),
11265 0, "%Kcall to %D will always overflow destination buffer",
11266 exp, get_callee_fndecl (exp));
11267 return NULL_RTX;
11268 }
11269
11270 fn = NULL_TREE;
11271 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11272 mem{cpy,pcpy,move,set} is available. */
11273 switch (fcode)
11274 {
11275 case BUILT_IN_MEMCPY_CHK:
11276 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11277 break;
11278 case BUILT_IN_MEMPCPY_CHK:
11279 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11280 break;
11281 case BUILT_IN_MEMMOVE_CHK:
11282 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11283 break;
11284 case BUILT_IN_MEMSET_CHK:
11285 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11286 break;
11287 default:
11288 break;
11289 }
11290
11291 if (! fn)
11292 return NULL_RTX;
11293
11294 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11295 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11296 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11297 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11298 }
11299 else if (fcode == BUILT_IN_MEMSET_CHK)
11300 return NULL_RTX;
11301 else
11302 {
11303 unsigned int dest_align = get_pointer_alignment (dest);
11304
11305 /* If DEST is not a pointer type, call the normal function. */
11306 if (dest_align == 0)
11307 return NULL_RTX;
11308
11309 /* If SRC and DEST are the same (and not volatile), do nothing. */
11310 if (operand_equal_p (src, dest, 0))
11311 {
11312 tree expr;
11313
11314 if (fcode != BUILT_IN_MEMPCPY_CHK)
11315 {
11316 /* Evaluate and ignore LEN in case it has side-effects. */
11317 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11318 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11319 }
11320
11321 expr = fold_build_pointer_plus (dest, len);
11322 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11323 }
11324
11325 /* __memmove_chk special case. */
11326 if (fcode == BUILT_IN_MEMMOVE_CHK)
11327 {
11328 unsigned int src_align = get_pointer_alignment (src);
11329
11330 if (src_align == 0)
11331 return NULL_RTX;
11332
11333 /* If src is categorized for a readonly section we can use
11334 normal __memcpy_chk. */
11335 if (readonly_data_expr (src))
11336 {
11337 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11338 if (!fn)
11339 return NULL_RTX;
11340 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11341 dest, src, len, size);
11342 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11343 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11344 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11345 }
11346 }
11347 return NULL_RTX;
11348 }
11349 }
11350
11351 /* Emit warning if a buffer overflow is detected at compile time. */
11352
11353 static void
11354 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11355 {
11356 int is_strlen = 0;
11357 tree len, size;
11358 location_t loc = tree_nonartificial_location (exp);
11359
11360 switch (fcode)
11361 {
11362 case BUILT_IN_STRCPY_CHK:
11363 case BUILT_IN_STPCPY_CHK:
11364 /* For __strcat_chk the warning will be emitted only if overflowing
11365 by at least strlen (dest) + 1 bytes. */
11366 case BUILT_IN_STRCAT_CHK:
11367 len = CALL_EXPR_ARG (exp, 1);
11368 size = CALL_EXPR_ARG (exp, 2);
11369 is_strlen = 1;
11370 break;
11371 case BUILT_IN_STRNCAT_CHK:
11372 case BUILT_IN_STRNCPY_CHK:
11373 case BUILT_IN_STPNCPY_CHK:
11374 len = CALL_EXPR_ARG (exp, 2);
11375 size = CALL_EXPR_ARG (exp, 3);
11376 break;
11377 case BUILT_IN_SNPRINTF_CHK:
11378 case BUILT_IN_VSNPRINTF_CHK:
11379 len = CALL_EXPR_ARG (exp, 1);
11380 size = CALL_EXPR_ARG (exp, 3);
11381 break;
11382 default:
11383 gcc_unreachable ();
11384 }
11385
11386 if (!len || !size)
11387 return;
11388
11389 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11390 return;
11391
11392 if (is_strlen)
11393 {
11394 len = c_strlen (len, 1);
11395 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11396 return;
11397 }
11398 else if (fcode == BUILT_IN_STRNCAT_CHK)
11399 {
11400 tree src = CALL_EXPR_ARG (exp, 1);
11401 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11402 return;
11403 src = c_strlen (src, 1);
11404 if (! src || ! tree_fits_uhwi_p (src))
11405 {
11406 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11407 exp, get_callee_fndecl (exp));
11408 return;
11409 }
11410 else if (tree_int_cst_lt (src, size))
11411 return;
11412 }
11413 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11414 return;
11415
11416 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11417 exp, get_callee_fndecl (exp));
11418 }
11419
11420 /* Emit warning if a buffer overflow is detected at compile time
11421 in __sprintf_chk/__vsprintf_chk calls. */
11422
11423 static void
11424 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11425 {
11426 tree size, len, fmt;
11427 const char *fmt_str;
11428 int nargs = call_expr_nargs (exp);
11429
11430 /* Verify the required arguments in the original call. */
11431
11432 if (nargs < 4)
11433 return;
11434 size = CALL_EXPR_ARG (exp, 2);
11435 fmt = CALL_EXPR_ARG (exp, 3);
11436
11437 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11438 return;
11439
11440 /* Check whether the format is a literal string constant. */
11441 fmt_str = c_getstr (fmt);
11442 if (fmt_str == NULL)
11443 return;
11444
11445 if (!init_target_chars ())
11446 return;
11447
11448 /* If the format doesn't contain % args or %%, we know its size. */
11449 if (strchr (fmt_str, target_percent) == 0)
11450 len = build_int_cstu (size_type_node, strlen (fmt_str));
11451 /* If the format is "%s" and first ... argument is a string literal,
11452 we know it too. */
11453 else if (fcode == BUILT_IN_SPRINTF_CHK
11454 && strcmp (fmt_str, target_percent_s) == 0)
11455 {
11456 tree arg;
11457
11458 if (nargs < 5)
11459 return;
11460 arg = CALL_EXPR_ARG (exp, 4);
11461 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11462 return;
11463
11464 len = c_strlen (arg, 1);
11465 if (!len || ! tree_fits_uhwi_p (len))
11466 return;
11467 }
11468 else
11469 return;
11470
11471 if (! tree_int_cst_lt (len, size))
11472 warning_at (tree_nonartificial_location (exp),
11473 0, "%Kcall to %D will always overflow destination buffer",
11474 exp, get_callee_fndecl (exp));
11475 }
11476
11477 /* Emit warning if a free is called with address of a variable. */
11478
11479 static void
11480 maybe_emit_free_warning (tree exp)
11481 {
11482 tree arg = CALL_EXPR_ARG (exp, 0);
11483
11484 STRIP_NOPS (arg);
11485 if (TREE_CODE (arg) != ADDR_EXPR)
11486 return;
11487
11488 arg = get_base_address (TREE_OPERAND (arg, 0));
11489 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11490 return;
11491
11492 if (SSA_VAR_P (arg))
11493 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11494 "%Kattempt to free a non-heap object %qD", exp, arg);
11495 else
11496 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11497 "%Kattempt to free a non-heap object", exp);
11498 }
11499
11500 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11501 if possible. */
11502
11503 static tree
11504 fold_builtin_object_size (tree ptr, tree ost)
11505 {
11506 unsigned HOST_WIDE_INT bytes;
11507 int object_size_type;
11508
11509 if (!validate_arg (ptr, POINTER_TYPE)
11510 || !validate_arg (ost, INTEGER_TYPE))
11511 return NULL_TREE;
11512
11513 STRIP_NOPS (ost);
11514
11515 if (TREE_CODE (ost) != INTEGER_CST
11516 || tree_int_cst_sgn (ost) < 0
11517 || compare_tree_int (ost, 3) > 0)
11518 return NULL_TREE;
11519
11520 object_size_type = tree_to_shwi (ost);
11521
11522 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11523 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11524 and (size_t) 0 for types 2 and 3. */
11525 if (TREE_SIDE_EFFECTS (ptr))
11526 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11527
11528 if (TREE_CODE (ptr) == ADDR_EXPR)
11529 {
11530 bytes = compute_builtin_object_size (ptr, object_size_type);
11531 if (wi::fits_to_tree_p (bytes, size_type_node))
11532 return build_int_cstu (size_type_node, bytes);
11533 }
11534 else if (TREE_CODE (ptr) == SSA_NAME)
11535 {
11536 /* If object size is not known yet, delay folding until
11537 later. Maybe subsequent passes will help determining
11538 it. */
11539 bytes = compute_builtin_object_size (ptr, object_size_type);
11540 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11541 && wi::fits_to_tree_p (bytes, size_type_node))
11542 return build_int_cstu (size_type_node, bytes);
11543 }
11544
11545 return NULL_TREE;
11546 }
11547
11548 /* Builtins with folding operations that operate on "..." arguments
11549 need special handling; we need to store the arguments in a convenient
11550 data structure before attempting any folding. Fortunately there are
11551 only a few builtins that fall into this category. FNDECL is the
11552 function, EXP is the CALL_EXPR for the call. */
11553
11554 static tree
11555 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11556 {
11557 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11558 tree ret = NULL_TREE;
11559
11560 switch (fcode)
11561 {
11562 case BUILT_IN_FPCLASSIFY:
11563 ret = fold_builtin_fpclassify (loc, args, nargs);
11564 break;
11565
11566 default:
11567 break;
11568 }
11569 if (ret)
11570 {
11571 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11572 SET_EXPR_LOCATION (ret, loc);
11573 TREE_NO_WARNING (ret) = 1;
11574 return ret;
11575 }
11576 return NULL_TREE;
11577 }
11578
11579 /* Initialize format string characters in the target charset. */
11580
11581 bool
11582 init_target_chars (void)
11583 {
11584 static bool init;
11585 if (!init)
11586 {
11587 target_newline = lang_hooks.to_target_charset ('\n');
11588 target_percent = lang_hooks.to_target_charset ('%');
11589 target_c = lang_hooks.to_target_charset ('c');
11590 target_s = lang_hooks.to_target_charset ('s');
11591 if (target_newline == 0 || target_percent == 0 || target_c == 0
11592 || target_s == 0)
11593 return false;
11594
11595 target_percent_c[0] = target_percent;
11596 target_percent_c[1] = target_c;
11597 target_percent_c[2] = '\0';
11598
11599 target_percent_s[0] = target_percent;
11600 target_percent_s[1] = target_s;
11601 target_percent_s[2] = '\0';
11602
11603 target_percent_s_newline[0] = target_percent;
11604 target_percent_s_newline[1] = target_s;
11605 target_percent_s_newline[2] = target_newline;
11606 target_percent_s_newline[3] = '\0';
11607
11608 init = true;
11609 }
11610 return true;
11611 }
11612
11613 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11614 and no overflow/underflow occurred. INEXACT is true if M was not
11615 exactly calculated. TYPE is the tree type for the result. This
11616 function assumes that you cleared the MPFR flags and then
11617 calculated M to see if anything subsequently set a flag prior to
11618 entering this function. Return NULL_TREE if any checks fail. */
11619
11620 static tree
11621 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11622 {
11623 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11624 overflow/underflow occurred. If -frounding-math, proceed iff the
11625 result of calling FUNC was exact. */
11626 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11627 && (!flag_rounding_math || !inexact))
11628 {
11629 REAL_VALUE_TYPE rr;
11630
11631 real_from_mpfr (&rr, m, type, GMP_RNDN);
11632 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11633 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11634 but the mpft_t is not, then we underflowed in the
11635 conversion. */
11636 if (real_isfinite (&rr)
11637 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11638 {
11639 REAL_VALUE_TYPE rmode;
11640
11641 real_convert (&rmode, TYPE_MODE (type), &rr);
11642 /* Proceed iff the specified mode can hold the value. */
11643 if (real_identical (&rmode, &rr))
11644 return build_real (type, rmode);
11645 }
11646 }
11647 return NULL_TREE;
11648 }
11649
11650 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11651 number and no overflow/underflow occurred. INEXACT is true if M
11652 was not exactly calculated. TYPE is the tree type for the result.
11653 This function assumes that you cleared the MPFR flags and then
11654 calculated M to see if anything subsequently set a flag prior to
11655 entering this function. Return NULL_TREE if any checks fail, if
11656 FORCE_CONVERT is true, then bypass the checks. */
11657
11658 static tree
11659 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11660 {
11661 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11662 overflow/underflow occurred. If -frounding-math, proceed iff the
11663 result of calling FUNC was exact. */
11664 if (force_convert
11665 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11666 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11667 && (!flag_rounding_math || !inexact)))
11668 {
11669 REAL_VALUE_TYPE re, im;
11670
11671 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11672 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11673 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11674 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11675 but the mpft_t is not, then we underflowed in the
11676 conversion. */
11677 if (force_convert
11678 || (real_isfinite (&re) && real_isfinite (&im)
11679 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11680 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11681 {
11682 REAL_VALUE_TYPE re_mode, im_mode;
11683
11684 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11685 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11686 /* Proceed iff the specified mode can hold the value. */
11687 if (force_convert
11688 || (real_identical (&re_mode, &re)
11689 && real_identical (&im_mode, &im)))
11690 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11691 build_real (TREE_TYPE (type), im_mode));
11692 }
11693 }
11694 return NULL_TREE;
11695 }
11696
11697 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11698 FUNC on it and return the resulting value as a tree with type TYPE.
11699 If MIN and/or MAX are not NULL, then the supplied ARG must be
11700 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11701 acceptable values, otherwise they are not. The mpfr precision is
11702 set to the precision of TYPE. We assume that function FUNC returns
11703 zero if the result could be calculated exactly within the requested
11704 precision. */
11705
11706 static tree
11707 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11708 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11709 bool inclusive)
11710 {
11711 tree result = NULL_TREE;
11712
11713 STRIP_NOPS (arg);
11714
11715 /* To proceed, MPFR must exactly represent the target floating point
11716 format, which only happens when the target base equals two. */
11717 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11718 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11719 {
11720 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11721
11722 if (real_isfinite (ra)
11723 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11724 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11725 {
11726 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11727 const int prec = fmt->p;
11728 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11729 int inexact;
11730 mpfr_t m;
11731
11732 mpfr_init2 (m, prec);
11733 mpfr_from_real (m, ra, GMP_RNDN);
11734 mpfr_clear_flags ();
11735 inexact = func (m, m, rnd);
11736 result = do_mpfr_ckconv (m, type, inexact);
11737 mpfr_clear (m);
11738 }
11739 }
11740
11741 return result;
11742 }
11743
11744 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11745 FUNC on it and return the resulting value as a tree with type TYPE.
11746 The mpfr precision is set to the precision of TYPE. We assume that
11747 function FUNC returns zero if the result could be calculated
11748 exactly within the requested precision. */
11749
11750 static tree
11751 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11752 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11753 {
11754 tree result = NULL_TREE;
11755
11756 STRIP_NOPS (arg1);
11757 STRIP_NOPS (arg2);
11758
11759 /* To proceed, MPFR must exactly represent the target floating point
11760 format, which only happens when the target base equals two. */
11761 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11762 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11763 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11764 {
11765 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11766 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11767
11768 if (real_isfinite (ra1) && real_isfinite (ra2))
11769 {
11770 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11771 const int prec = fmt->p;
11772 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11773 int inexact;
11774 mpfr_t m1, m2;
11775
11776 mpfr_inits2 (prec, m1, m2, NULL);
11777 mpfr_from_real (m1, ra1, GMP_RNDN);
11778 mpfr_from_real (m2, ra2, GMP_RNDN);
11779 mpfr_clear_flags ();
11780 inexact = func (m1, m1, m2, rnd);
11781 result = do_mpfr_ckconv (m1, type, inexact);
11782 mpfr_clears (m1, m2, NULL);
11783 }
11784 }
11785
11786 return result;
11787 }
11788
11789 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11790 FUNC on it and return the resulting value as a tree with type TYPE.
11791 The mpfr precision is set to the precision of TYPE. We assume that
11792 function FUNC returns zero if the result could be calculated
11793 exactly within the requested precision. */
11794
11795 static tree
11796 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11797 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11798 {
11799 tree result = NULL_TREE;
11800
11801 STRIP_NOPS (arg1);
11802 STRIP_NOPS (arg2);
11803 STRIP_NOPS (arg3);
11804
11805 /* To proceed, MPFR must exactly represent the target floating point
11806 format, which only happens when the target base equals two. */
11807 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11808 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11809 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11810 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11811 {
11812 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11813 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11814 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11815
11816 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11817 {
11818 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11819 const int prec = fmt->p;
11820 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11821 int inexact;
11822 mpfr_t m1, m2, m3;
11823
11824 mpfr_inits2 (prec, m1, m2, m3, NULL);
11825 mpfr_from_real (m1, ra1, GMP_RNDN);
11826 mpfr_from_real (m2, ra2, GMP_RNDN);
11827 mpfr_from_real (m3, ra3, GMP_RNDN);
11828 mpfr_clear_flags ();
11829 inexact = func (m1, m1, m2, m3, rnd);
11830 result = do_mpfr_ckconv (m1, type, inexact);
11831 mpfr_clears (m1, m2, m3, NULL);
11832 }
11833 }
11834
11835 return result;
11836 }
11837
11838 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11839 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11840 If ARG_SINP and ARG_COSP are NULL then the result is returned
11841 as a complex value.
11842 The type is taken from the type of ARG and is used for setting the
11843 precision of the calculation and results. */
11844
11845 static tree
11846 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11847 {
11848 tree const type = TREE_TYPE (arg);
11849 tree result = NULL_TREE;
11850
11851 STRIP_NOPS (arg);
11852
11853 /* To proceed, MPFR must exactly represent the target floating point
11854 format, which only happens when the target base equals two. */
11855 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11856 && TREE_CODE (arg) == REAL_CST
11857 && !TREE_OVERFLOW (arg))
11858 {
11859 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11860
11861 if (real_isfinite (ra))
11862 {
11863 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11864 const int prec = fmt->p;
11865 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11866 tree result_s, result_c;
11867 int inexact;
11868 mpfr_t m, ms, mc;
11869
11870 mpfr_inits2 (prec, m, ms, mc, NULL);
11871 mpfr_from_real (m, ra, GMP_RNDN);
11872 mpfr_clear_flags ();
11873 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11874 result_s = do_mpfr_ckconv (ms, type, inexact);
11875 result_c = do_mpfr_ckconv (mc, type, inexact);
11876 mpfr_clears (m, ms, mc, NULL);
11877 if (result_s && result_c)
11878 {
11879 /* If we are to return in a complex value do so. */
11880 if (!arg_sinp && !arg_cosp)
11881 return build_complex (build_complex_type (type),
11882 result_c, result_s);
11883
11884 /* Dereference the sin/cos pointer arguments. */
11885 arg_sinp = build_fold_indirect_ref (arg_sinp);
11886 arg_cosp = build_fold_indirect_ref (arg_cosp);
11887 /* Proceed if valid pointer type were passed in. */
11888 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11889 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11890 {
11891 /* Set the values. */
11892 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11893 result_s);
11894 TREE_SIDE_EFFECTS (result_s) = 1;
11895 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11896 result_c);
11897 TREE_SIDE_EFFECTS (result_c) = 1;
11898 /* Combine the assignments into a compound expr. */
11899 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11900 result_s, result_c));
11901 }
11902 }
11903 }
11904 }
11905 return result;
11906 }
11907
11908 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11909 two-argument mpfr order N Bessel function FUNC on them and return
11910 the resulting value as a tree with type TYPE. The mpfr precision
11911 is set to the precision of TYPE. We assume that function FUNC
11912 returns zero if the result could be calculated exactly within the
11913 requested precision. */
11914 static tree
11915 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11916 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11917 const REAL_VALUE_TYPE *min, bool inclusive)
11918 {
11919 tree result = NULL_TREE;
11920
11921 STRIP_NOPS (arg1);
11922 STRIP_NOPS (arg2);
11923
11924 /* To proceed, MPFR must exactly represent the target floating point
11925 format, which only happens when the target base equals two. */
11926 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11927 && tree_fits_shwi_p (arg1)
11928 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11929 {
11930 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11931 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11932
11933 if (n == (long)n
11934 && real_isfinite (ra)
11935 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11936 {
11937 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11938 const int prec = fmt->p;
11939 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11940 int inexact;
11941 mpfr_t m;
11942
11943 mpfr_init2 (m, prec);
11944 mpfr_from_real (m, ra, GMP_RNDN);
11945 mpfr_clear_flags ();
11946 inexact = func (m, n, m, rnd);
11947 result = do_mpfr_ckconv (m, type, inexact);
11948 mpfr_clear (m);
11949 }
11950 }
11951
11952 return result;
11953 }
11954
11955 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11956 the pointer *(ARG_QUO) and return the result. The type is taken
11957 from the type of ARG0 and is used for setting the precision of the
11958 calculation and results. */
11959
11960 static tree
11961 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11962 {
11963 tree const type = TREE_TYPE (arg0);
11964 tree result = NULL_TREE;
11965
11966 STRIP_NOPS (arg0);
11967 STRIP_NOPS (arg1);
11968
11969 /* To proceed, MPFR must exactly represent the target floating point
11970 format, which only happens when the target base equals two. */
11971 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11972 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11973 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11974 {
11975 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11976 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11977
11978 if (real_isfinite (ra0) && real_isfinite (ra1))
11979 {
11980 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11981 const int prec = fmt->p;
11982 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11983 tree result_rem;
11984 long integer_quo;
11985 mpfr_t m0, m1;
11986
11987 mpfr_inits2 (prec, m0, m1, NULL);
11988 mpfr_from_real (m0, ra0, GMP_RNDN);
11989 mpfr_from_real (m1, ra1, GMP_RNDN);
11990 mpfr_clear_flags ();
11991 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11992 /* Remquo is independent of the rounding mode, so pass
11993 inexact=0 to do_mpfr_ckconv(). */
11994 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11995 mpfr_clears (m0, m1, NULL);
11996 if (result_rem)
11997 {
11998 /* MPFR calculates quo in the host's long so it may
11999 return more bits in quo than the target int can hold
12000 if sizeof(host long) > sizeof(target int). This can
12001 happen even for native compilers in LP64 mode. In
12002 these cases, modulo the quo value with the largest
12003 number that the target int can hold while leaving one
12004 bit for the sign. */
12005 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12006 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12007
12008 /* Dereference the quo pointer argument. */
12009 arg_quo = build_fold_indirect_ref (arg_quo);
12010 /* Proceed iff a valid pointer type was passed in. */
12011 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12012 {
12013 /* Set the value. */
12014 tree result_quo
12015 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12016 build_int_cst (TREE_TYPE (arg_quo),
12017 integer_quo));
12018 TREE_SIDE_EFFECTS (result_quo) = 1;
12019 /* Combine the quo assignment with the rem. */
12020 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12021 result_quo, result_rem));
12022 }
12023 }
12024 }
12025 }
12026 return result;
12027 }
12028
12029 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12030 resulting value as a tree with type TYPE. The mpfr precision is
12031 set to the precision of TYPE. We assume that this mpfr function
12032 returns zero if the result could be calculated exactly within the
12033 requested precision. In addition, the integer pointer represented
12034 by ARG_SG will be dereferenced and set to the appropriate signgam
12035 (-1,1) value. */
12036
12037 static tree
12038 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12039 {
12040 tree result = NULL_TREE;
12041
12042 STRIP_NOPS (arg);
12043
12044 /* To proceed, MPFR must exactly represent the target floating point
12045 format, which only happens when the target base equals two. Also
12046 verify ARG is a constant and that ARG_SG is an int pointer. */
12047 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12048 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12049 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12050 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12051 {
12052 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12053
12054 /* In addition to NaN and Inf, the argument cannot be zero or a
12055 negative integer. */
12056 if (real_isfinite (ra)
12057 && ra->cl != rvc_zero
12058 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12059 {
12060 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12061 const int prec = fmt->p;
12062 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12063 int inexact, sg;
12064 mpfr_t m;
12065 tree result_lg;
12066
12067 mpfr_init2 (m, prec);
12068 mpfr_from_real (m, ra, GMP_RNDN);
12069 mpfr_clear_flags ();
12070 inexact = mpfr_lgamma (m, &sg, m, rnd);
12071 result_lg = do_mpfr_ckconv (m, type, inexact);
12072 mpfr_clear (m);
12073 if (result_lg)
12074 {
12075 tree result_sg;
12076
12077 /* Dereference the arg_sg pointer argument. */
12078 arg_sg = build_fold_indirect_ref (arg_sg);
12079 /* Assign the signgam value into *arg_sg. */
12080 result_sg = fold_build2 (MODIFY_EXPR,
12081 TREE_TYPE (arg_sg), arg_sg,
12082 build_int_cst (TREE_TYPE (arg_sg), sg));
12083 TREE_SIDE_EFFECTS (result_sg) = 1;
12084 /* Combine the signgam assignment with the lgamma result. */
12085 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12086 result_sg, result_lg));
12087 }
12088 }
12089 }
12090
12091 return result;
12092 }
12093
12094 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12095 function FUNC on it and return the resulting value as a tree with
12096 type TYPE. The mpfr precision is set to the precision of TYPE. We
12097 assume that function FUNC returns zero if the result could be
12098 calculated exactly within the requested precision. */
12099
12100 static tree
12101 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12102 {
12103 tree result = NULL_TREE;
12104
12105 STRIP_NOPS (arg);
12106
12107 /* To proceed, MPFR must exactly represent the target floating point
12108 format, which only happens when the target base equals two. */
12109 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12110 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12111 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12112 {
12113 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12114 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12115
12116 if (real_isfinite (re) && real_isfinite (im))
12117 {
12118 const struct real_format *const fmt =
12119 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12120 const int prec = fmt->p;
12121 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12122 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12123 int inexact;
12124 mpc_t m;
12125
12126 mpc_init2 (m, prec);
12127 mpfr_from_real (mpc_realref (m), re, rnd);
12128 mpfr_from_real (mpc_imagref (m), im, rnd);
12129 mpfr_clear_flags ();
12130 inexact = func (m, m, crnd);
12131 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12132 mpc_clear (m);
12133 }
12134 }
12135
12136 return result;
12137 }
12138
12139 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12140 mpc function FUNC on it and return the resulting value as a tree
12141 with type TYPE. The mpfr precision is set to the precision of
12142 TYPE. We assume that function FUNC returns zero if the result
12143 could be calculated exactly within the requested precision. If
12144 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12145 in the arguments and/or results. */
12146
12147 tree
12148 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12149 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12150 {
12151 tree result = NULL_TREE;
12152
12153 STRIP_NOPS (arg0);
12154 STRIP_NOPS (arg1);
12155
12156 /* To proceed, MPFR must exactly represent the target floating point
12157 format, which only happens when the target base equals two. */
12158 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12159 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12160 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12161 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12162 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12163 {
12164 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12165 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12166 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12167 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12168
12169 if (do_nonfinite
12170 || (real_isfinite (re0) && real_isfinite (im0)
12171 && real_isfinite (re1) && real_isfinite (im1)))
12172 {
12173 const struct real_format *const fmt =
12174 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12175 const int prec = fmt->p;
12176 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12177 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12178 int inexact;
12179 mpc_t m0, m1;
12180
12181 mpc_init2 (m0, prec);
12182 mpc_init2 (m1, prec);
12183 mpfr_from_real (mpc_realref (m0), re0, rnd);
12184 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12185 mpfr_from_real (mpc_realref (m1), re1, rnd);
12186 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12187 mpfr_clear_flags ();
12188 inexact = func (m0, m0, m1, crnd);
12189 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12190 mpc_clear (m0);
12191 mpc_clear (m1);
12192 }
12193 }
12194
12195 return result;
12196 }
12197
12198 /* A wrapper function for builtin folding that prevents warnings for
12199 "statement without effect" and the like, caused by removing the
12200 call node earlier than the warning is generated. */
12201
12202 tree
12203 fold_call_stmt (gcall *stmt, bool ignore)
12204 {
12205 tree ret = NULL_TREE;
12206 tree fndecl = gimple_call_fndecl (stmt);
12207 location_t loc = gimple_location (stmt);
12208 if (fndecl
12209 && TREE_CODE (fndecl) == FUNCTION_DECL
12210 && DECL_BUILT_IN (fndecl)
12211 && !gimple_call_va_arg_pack_p (stmt))
12212 {
12213 int nargs = gimple_call_num_args (stmt);
12214 tree *args = (nargs > 0
12215 ? gimple_call_arg_ptr (stmt, 0)
12216 : &error_mark_node);
12217
12218 if (avoid_folding_inline_builtin (fndecl))
12219 return NULL_TREE;
12220 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12221 {
12222 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12223 }
12224 else
12225 {
12226 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12227 if (ret)
12228 {
12229 /* Propagate location information from original call to
12230 expansion of builtin. Otherwise things like
12231 maybe_emit_chk_warning, that operate on the expansion
12232 of a builtin, will use the wrong location information. */
12233 if (gimple_has_location (stmt))
12234 {
12235 tree realret = ret;
12236 if (TREE_CODE (ret) == NOP_EXPR)
12237 realret = TREE_OPERAND (ret, 0);
12238 if (CAN_HAVE_LOCATION_P (realret)
12239 && !EXPR_HAS_LOCATION (realret))
12240 SET_EXPR_LOCATION (realret, loc);
12241 return realret;
12242 }
12243 return ret;
12244 }
12245 }
12246 }
12247 return NULL_TREE;
12248 }
12249
12250 /* Look up the function in builtin_decl that corresponds to DECL
12251 and set ASMSPEC as its user assembler name. DECL must be a
12252 function decl that declares a builtin. */
12253
12254 void
12255 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12256 {
12257 tree builtin;
12258 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12259 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12260 && asmspec != 0);
12261
12262 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12263 set_user_assembler_name (builtin, asmspec);
12264 switch (DECL_FUNCTION_CODE (decl))
12265 {
12266 case BUILT_IN_MEMCPY:
12267 init_block_move_fn (asmspec);
12268 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12269 break;
12270 case BUILT_IN_MEMSET:
12271 init_block_clear_fn (asmspec);
12272 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12273 break;
12274 case BUILT_IN_MEMMOVE:
12275 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12276 break;
12277 case BUILT_IN_MEMCMP:
12278 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12279 break;
12280 case BUILT_IN_ABORT:
12281 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12282 break;
12283 case BUILT_IN_FFS:
12284 if (INT_TYPE_SIZE < BITS_PER_WORD)
12285 {
12286 set_user_assembler_libfunc ("ffs", asmspec);
12287 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12288 MODE_INT, 0), "ffs");
12289 }
12290 break;
12291 default:
12292 break;
12293 }
12294 }
12295
12296 /* Return true if DECL is a builtin that expands to a constant or similarly
12297 simple code. */
12298 bool
12299 is_simple_builtin (tree decl)
12300 {
12301 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12302 switch (DECL_FUNCTION_CODE (decl))
12303 {
12304 /* Builtins that expand to constants. */
12305 case BUILT_IN_CONSTANT_P:
12306 case BUILT_IN_EXPECT:
12307 case BUILT_IN_OBJECT_SIZE:
12308 case BUILT_IN_UNREACHABLE:
12309 /* Simple register moves or loads from stack. */
12310 case BUILT_IN_ASSUME_ALIGNED:
12311 case BUILT_IN_RETURN_ADDRESS:
12312 case BUILT_IN_EXTRACT_RETURN_ADDR:
12313 case BUILT_IN_FROB_RETURN_ADDR:
12314 case BUILT_IN_RETURN:
12315 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12316 case BUILT_IN_FRAME_ADDRESS:
12317 case BUILT_IN_VA_END:
12318 case BUILT_IN_STACK_SAVE:
12319 case BUILT_IN_STACK_RESTORE:
12320 /* Exception state returns or moves registers around. */
12321 case BUILT_IN_EH_FILTER:
12322 case BUILT_IN_EH_POINTER:
12323 case BUILT_IN_EH_COPY_VALUES:
12324 return true;
12325
12326 default:
12327 return false;
12328 }
12329
12330 return false;
12331 }
12332
12333 /* Return true if DECL is a builtin that is not expensive, i.e., they are
12334 most probably expanded inline into reasonably simple code. This is a
12335 superset of is_simple_builtin. */
12336 bool
12337 is_inexpensive_builtin (tree decl)
12338 {
12339 if (!decl)
12340 return false;
12341 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12342 return true;
12343 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12344 switch (DECL_FUNCTION_CODE (decl))
12345 {
12346 case BUILT_IN_ABS:
12347 case BUILT_IN_ALLOCA:
12348 case BUILT_IN_ALLOCA_WITH_ALIGN:
12349 case BUILT_IN_BSWAP16:
12350 case BUILT_IN_BSWAP32:
12351 case BUILT_IN_BSWAP64:
12352 case BUILT_IN_CLZ:
12353 case BUILT_IN_CLZIMAX:
12354 case BUILT_IN_CLZL:
12355 case BUILT_IN_CLZLL:
12356 case BUILT_IN_CTZ:
12357 case BUILT_IN_CTZIMAX:
12358 case BUILT_IN_CTZL:
12359 case BUILT_IN_CTZLL:
12360 case BUILT_IN_FFS:
12361 case BUILT_IN_FFSIMAX:
12362 case BUILT_IN_FFSL:
12363 case BUILT_IN_FFSLL:
12364 case BUILT_IN_IMAXABS:
12365 case BUILT_IN_FINITE:
12366 case BUILT_IN_FINITEF:
12367 case BUILT_IN_FINITEL:
12368 case BUILT_IN_FINITED32:
12369 case BUILT_IN_FINITED64:
12370 case BUILT_IN_FINITED128:
12371 case BUILT_IN_FPCLASSIFY:
12372 case BUILT_IN_ISFINITE:
12373 case BUILT_IN_ISINF_SIGN:
12374 case BUILT_IN_ISINF:
12375 case BUILT_IN_ISINFF:
12376 case BUILT_IN_ISINFL:
12377 case BUILT_IN_ISINFD32:
12378 case BUILT_IN_ISINFD64:
12379 case BUILT_IN_ISINFD128:
12380 case BUILT_IN_ISNAN:
12381 case BUILT_IN_ISNANF:
12382 case BUILT_IN_ISNANL:
12383 case BUILT_IN_ISNAND32:
12384 case BUILT_IN_ISNAND64:
12385 case BUILT_IN_ISNAND128:
12386 case BUILT_IN_ISNORMAL:
12387 case BUILT_IN_ISGREATER:
12388 case BUILT_IN_ISGREATEREQUAL:
12389 case BUILT_IN_ISLESS:
12390 case BUILT_IN_ISLESSEQUAL:
12391 case BUILT_IN_ISLESSGREATER:
12392 case BUILT_IN_ISUNORDERED:
12393 case BUILT_IN_VA_ARG_PACK:
12394 case BUILT_IN_VA_ARG_PACK_LEN:
12395 case BUILT_IN_VA_COPY:
12396 case BUILT_IN_TRAP:
12397 case BUILT_IN_SAVEREGS:
12398 case BUILT_IN_POPCOUNTL:
12399 case BUILT_IN_POPCOUNTLL:
12400 case BUILT_IN_POPCOUNTIMAX:
12401 case BUILT_IN_POPCOUNT:
12402 case BUILT_IN_PARITYL:
12403 case BUILT_IN_PARITYLL:
12404 case BUILT_IN_PARITYIMAX:
12405 case BUILT_IN_PARITY:
12406 case BUILT_IN_LABS:
12407 case BUILT_IN_LLABS:
12408 case BUILT_IN_PREFETCH:
12409 case BUILT_IN_ACC_ON_DEVICE:
12410 return true;
12411
12412 default:
12413 return is_simple_builtin (decl);
12414 }
12415
12416 return false;
12417 }