/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree*, int, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   (with -fcilkplus) names one of the Cilk runtime helpers recognized
   below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
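/* For illustration (added here for exposition; not in the original
   sources): under the rules above, "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n" all count as built-in
   names, while a plain "memcpy" does not.  */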


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
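/* Illustrative note (added; not in the original sources): the two
   out-parameters describe the address as ptr mod M == N.  E.g. if
   get_object_alignment_1 sets *ALIGNP to 64 and *BITPOSP to 16, the
   object sits two bytes past a 64-bit boundary, and get_object_alignment
   above reduces that to bitpos & -bitpos == 16 bits of known alignment.  */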

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
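/* Worked example (added for exposition; not in the original sources):
   for SRC pointing two characters into the constant "hello", the code
   above returns ssize_int (3); for a variable offset into "foo\0bar" it
   returns NULL_TREE, because the embedded zero byte leaves the length
   unknown at compile time.  */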

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
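/* Example (expository; not from the original file): on a little-endian
   target, c_readstr ("abcd", SImode) packs 'a' into the low byte and
   yields the constant 0x64636261, while a big-endian target gets
   0x61626364.  Once a NUL is seen, CH stays zero and the remaining
   bytes are filled with zeros.  */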

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
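/* Expository note (not in the original sources): a source-level call
   such as __builtin_return_address (2) reaches this function with
   COUNT == 2, so the loop above follows the dynamic chain twice before
   the return address is loaded from the resulting frame.  */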

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
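/* Note added for exposition (not in the original sources): after the
   setup above, the setjmp buffer is laid out as word 0 = the frame
   value, word 1 = the address of RECEIVER_LABEL, and the words from
   offset 2 * GET_MODE_SIZE (Pmode) onward = the machine-dependent
   stack save area written by emit_stack_save.  */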

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
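/* Usage note (expository; not part of the original file): callers pass
   the expected argument codes followed by a terminator, e.g.
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE) for a
   two-pointer builtin, or a trailing 0 to accept any further arguments,
   as in the expanders below.  */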

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
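/* Example (expository; not part of the original file): a source-level
   call such as __builtin_prefetch (&a[i], 0, 3) reaches this function
   with a read (0) / high-locality (3) hint, matching the defaults used
   above when the optional arguments are omitted.  */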

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed the arguments to
     us, not as we might have pretended they were passed.  Make sure it's
     a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
    if (HAVE_call_value)
      {
        rtx valreg = 0;

        /* Locate the unique return register.  It is not possible to
           express a call that sets more than one return register using
           call_value; use untyped_call for that.  In fact, untyped_call
           only needs to save the return registers in the given block.
1708 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1709 if ((mode = apply_result_mode[regno]) != VOIDmode)
1710 {
1711 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1712
1713 valreg = gen_rtx_REG (mode, regno);
1714 }
1715
1716 emit_call_insn (GEN_CALL_VALUE (valreg,
1717 gen_rtx_MEM (FUNCTION_MODE, function),
1718 const0_rtx, NULL_RTX, const0_rtx));
1719
1720 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1721 }
1722 else
1723 #endif
1724 gcc_unreachable ();
1725
1726 /* Find the CALL insn we just emitted, and attach the register usage
1727 information. */
1728 call_insn = last_call_insn ();
1729 add_function_usage_to (call_insn, call_fusage);
1730
1731 /* Restore the stack. */
1732 #ifdef HAVE_save_stack_nonlocal
1733 if (HAVE_save_stack_nonlocal)
1734 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1735 else
1736 #endif
1737 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1738 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1739
1740 OK_DEFER_POP;
1741
1742 /* Return the address of the result block. */
1743 result = copy_addr_to_reg (XEXP (result, 0));
1744 return convert_memory_address (ptr_mode, result);
1745 }
1746
1747 /* Perform an untyped return. */
1748
1749 static void
1750 expand_builtin_return (rtx result)
1751 {
1752 int size, align, regno;
1753 machine_mode mode;
1754 rtx reg;
1755 rtx_insn *call_fusage = 0;
1756
1757 result = convert_memory_address (Pmode, result);
1758
1759 apply_result_size ();
1760 result = gen_rtx_MEM (BLKmode, result);
1761
1762 #ifdef HAVE_untyped_return
1763 if (HAVE_untyped_return)
1764 {
1765 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1766 emit_barrier ();
1767 return;
1768 }
1769 #endif
1770
1771 /* Restore the return value and note that each value is used. */
1772 size = 0;
1773 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1774 if ((mode = apply_result_mode[regno]) != VOIDmode)
1775 {
1776 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1777 if (size % align != 0)
1778 size = CEIL (size, align) * align;
1779 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1780 emit_move_insn (reg, adjust_address (result, mode, size));
1781
1782 push_to_sequence (call_fusage);
1783 emit_use (reg);
1784 call_fusage = get_insns ();
1785 end_sequence ();
1786 size += GET_MODE_SIZE (mode);
1787 }
1788
1789 /* Put the USE insns before the return. */
1790 emit_insn (call_fusage);
1791
1792 /* Return whatever values were restored by jumping directly to the end
1793 of the function. */
1794 expand_naked_return ();
1795 }
1796
1797 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1798
1799 static enum type_class
1800 type_to_class (tree type)
1801 {
1802 switch (TREE_CODE (type))
1803 {
1804 case VOID_TYPE: return void_type_class;
1805 case INTEGER_TYPE: return integer_type_class;
1806 case ENUMERAL_TYPE: return enumeral_type_class;
1807 case BOOLEAN_TYPE: return boolean_type_class;
1808 case POINTER_TYPE: return pointer_type_class;
1809 case REFERENCE_TYPE: return reference_type_class;
1810 case OFFSET_TYPE: return offset_type_class;
1811 case REAL_TYPE: return real_type_class;
1812 case COMPLEX_TYPE: return complex_type_class;
1813 case FUNCTION_TYPE: return function_type_class;
1814 case METHOD_TYPE: return method_type_class;
1815 case RECORD_TYPE: return record_type_class;
1816 case UNION_TYPE:
1817 case QUAL_UNION_TYPE: return union_type_class;
1818 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1819 ? string_type_class : array_type_class);
1820 case LANG_TYPE: return lang_type_class;
1821 default: return no_type_class;
1822 }
1823 }
1824
1825 /* Expand a call EXP to __builtin_classify_type. */
1826
1827 static rtx
1828 expand_builtin_classify_type (tree exp)
1829 {
1830 if (call_expr_nargs (exp))
1831 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1832 return GEN_INT (no_type_class);
1833 }
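/* For example, __builtin_classify_type (42) folds to the constant
   integer_type_class and __builtin_classify_type (3.14) to
   real_type_class; a call with no argument yields no_type_class, as
   above.  */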
1834
1835 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1836 determines which among a set of three builtin math functions is
1837 appropriate for a given type mode. The `F' and `L' cases are
1838 automatically generated from the `double' case. */
1839 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1840 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1841 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1842 fcodel = BUILT_IN_MATHFN##L ; break;
1843 /* Similar to above, but appends _R after any F/L suffix. */
1844 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1845 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1846 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1847 fcodel = BUILT_IN_MATHFN##L_R ; break;
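/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to

       case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
         fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
         fcodel = BUILT_IN_SINL; break;

   covering the double, float and long double variants in one case.  */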
1848
1849 /* Return the mathematical function equivalent to FN but operating directly
1850 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1851 otherwise use the explicit declaration. If we can't do the conversion,
1852 return zero. */
1853
1854 static tree
1855 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1856 {
1857 enum built_in_function fcode, fcodef, fcodel, fcode2;
1858
1859 switch (fn)
1860 {
1861 CASE_MATHFN (BUILT_IN_ACOS)
1862 CASE_MATHFN (BUILT_IN_ACOSH)
1863 CASE_MATHFN (BUILT_IN_ASIN)
1864 CASE_MATHFN (BUILT_IN_ASINH)
1865 CASE_MATHFN (BUILT_IN_ATAN)
1866 CASE_MATHFN (BUILT_IN_ATAN2)
1867 CASE_MATHFN (BUILT_IN_ATANH)
1868 CASE_MATHFN (BUILT_IN_CBRT)
1869 CASE_MATHFN (BUILT_IN_CEIL)
1870 CASE_MATHFN (BUILT_IN_CEXPI)
1871 CASE_MATHFN (BUILT_IN_COPYSIGN)
1872 CASE_MATHFN (BUILT_IN_COS)
1873 CASE_MATHFN (BUILT_IN_COSH)
1874 CASE_MATHFN (BUILT_IN_DREM)
1875 CASE_MATHFN (BUILT_IN_ERF)
1876 CASE_MATHFN (BUILT_IN_ERFC)
1877 CASE_MATHFN (BUILT_IN_EXP)
1878 CASE_MATHFN (BUILT_IN_EXP10)
1879 CASE_MATHFN (BUILT_IN_EXP2)
1880 CASE_MATHFN (BUILT_IN_EXPM1)
1881 CASE_MATHFN (BUILT_IN_FABS)
1882 CASE_MATHFN (BUILT_IN_FDIM)
1883 CASE_MATHFN (BUILT_IN_FLOOR)
1884 CASE_MATHFN (BUILT_IN_FMA)
1885 CASE_MATHFN (BUILT_IN_FMAX)
1886 CASE_MATHFN (BUILT_IN_FMIN)
1887 CASE_MATHFN (BUILT_IN_FMOD)
1888 CASE_MATHFN (BUILT_IN_FREXP)
1889 CASE_MATHFN (BUILT_IN_GAMMA)
1890 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1891 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1892 CASE_MATHFN (BUILT_IN_HYPOT)
1893 CASE_MATHFN (BUILT_IN_ILOGB)
1894 CASE_MATHFN (BUILT_IN_ICEIL)
1895 CASE_MATHFN (BUILT_IN_IFLOOR)
1896 CASE_MATHFN (BUILT_IN_INF)
1897 CASE_MATHFN (BUILT_IN_IRINT)
1898 CASE_MATHFN (BUILT_IN_IROUND)
1899 CASE_MATHFN (BUILT_IN_ISINF)
1900 CASE_MATHFN (BUILT_IN_J0)
1901 CASE_MATHFN (BUILT_IN_J1)
1902 CASE_MATHFN (BUILT_IN_JN)
1903 CASE_MATHFN (BUILT_IN_LCEIL)
1904 CASE_MATHFN (BUILT_IN_LDEXP)
1905 CASE_MATHFN (BUILT_IN_LFLOOR)
1906 CASE_MATHFN (BUILT_IN_LGAMMA)
1907 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1908 CASE_MATHFN (BUILT_IN_LLCEIL)
1909 CASE_MATHFN (BUILT_IN_LLFLOOR)
1910 CASE_MATHFN (BUILT_IN_LLRINT)
1911 CASE_MATHFN (BUILT_IN_LLROUND)
1912 CASE_MATHFN (BUILT_IN_LOG)
1913 CASE_MATHFN (BUILT_IN_LOG10)
1914 CASE_MATHFN (BUILT_IN_LOG1P)
1915 CASE_MATHFN (BUILT_IN_LOG2)
1916 CASE_MATHFN (BUILT_IN_LOGB)
1917 CASE_MATHFN (BUILT_IN_LRINT)
1918 CASE_MATHFN (BUILT_IN_LROUND)
1919 CASE_MATHFN (BUILT_IN_MODF)
1920 CASE_MATHFN (BUILT_IN_NAN)
1921 CASE_MATHFN (BUILT_IN_NANS)
1922 CASE_MATHFN (BUILT_IN_NEARBYINT)
1923 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1924 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1925 CASE_MATHFN (BUILT_IN_POW)
1926 CASE_MATHFN (BUILT_IN_POWI)
1927 CASE_MATHFN (BUILT_IN_POW10)
1928 CASE_MATHFN (BUILT_IN_REMAINDER)
1929 CASE_MATHFN (BUILT_IN_REMQUO)
1930 CASE_MATHFN (BUILT_IN_RINT)
1931 CASE_MATHFN (BUILT_IN_ROUND)
1932 CASE_MATHFN (BUILT_IN_SCALB)
1933 CASE_MATHFN (BUILT_IN_SCALBLN)
1934 CASE_MATHFN (BUILT_IN_SCALBN)
1935 CASE_MATHFN (BUILT_IN_SIGNBIT)
1936 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1937 CASE_MATHFN (BUILT_IN_SIN)
1938 CASE_MATHFN (BUILT_IN_SINCOS)
1939 CASE_MATHFN (BUILT_IN_SINH)
1940 CASE_MATHFN (BUILT_IN_SQRT)
1941 CASE_MATHFN (BUILT_IN_TAN)
1942 CASE_MATHFN (BUILT_IN_TANH)
1943 CASE_MATHFN (BUILT_IN_TGAMMA)
1944 CASE_MATHFN (BUILT_IN_TRUNC)
1945 CASE_MATHFN (BUILT_IN_Y0)
1946 CASE_MATHFN (BUILT_IN_Y1)
1947 CASE_MATHFN (BUILT_IN_YN)
1948
1949 default:
1950 return NULL_TREE;
1951 }
1952
1953 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1954 fcode2 = fcode;
1955 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1956 fcode2 = fcodef;
1957 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1958 fcode2 = fcodel;
1959 else
1960 return NULL_TREE;
1961
1962 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1963 return NULL_TREE;
1964
1965 return builtin_decl_explicit (fcode2);
1966 }
1967
1968 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1969
1970 tree
1971 mathfn_built_in (tree type, enum built_in_function fn)
1972 {
1973 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1974 }
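/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of sinf (BUILT_IN_SINF) when that builtin is
   implicitly available, and NULL_TREE otherwise.  */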
1975
1976 /* If errno must be maintained, expand the RTL to check if the result,
1977 TARGET, of a built-in function call, EXP, is NaN, and if so set
1978 errno to EDOM. */
1979
1980 static void
1981 expand_errno_check (tree exp, rtx target)
1982 {
1983 rtx_code_label *lab = gen_label_rtx ();
1984
1985 /* Test the result; if it is NaN (the only value that compares unequal
1986 to itself), set errno=EDOM because the argument was not in the domain. */
1987 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1988 NULL_RTX, NULL_RTX, lab,
1989 /* The jump is very likely. */
1990 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1991
1992 #ifdef TARGET_EDOM
1993 /* If this built-in doesn't throw an exception, set errno directly. */
1994 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1995 {
1996 #ifdef GEN_ERRNO_RTX
1997 rtx errno_rtx = GEN_ERRNO_RTX;
1998 #else
1999 rtx errno_rtx
2000 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2001 #endif
2002 emit_move_insn (errno_rtx,
2003 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2004 emit_label (lab);
2005 return;
2006 }
2007 #endif
2008
2009 /* Make sure the library call isn't expanded as a tail call. */
2010 CALL_EXPR_TAILCALL (exp) = 0;
2011
2012 /* We can't set errno=EDOM directly; let the library call do it.
2013 Pop the arguments right away in case the call gets deleted. */
2014 NO_DEFER_POP;
2015 expand_call (exp, target, 0);
2016 OK_DEFER_POP;
2017 emit_label (lab);
2018 }
2019
2020 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2021 Return NULL_RTX if a normal call should be emitted rather than expanding
2022 the function in-line. EXP is the expression that is a call to the builtin
2023 function; if convenient, the result should be placed in TARGET.
2024 SUBTARGET may be used as the target for computing one of EXP's operands. */
2025
2026 static rtx
2027 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2028 {
2029 optab builtin_optab;
2030 rtx op0;
2031 rtx_insn *insns;
2032 tree fndecl = get_callee_fndecl (exp);
2033 machine_mode mode;
2034 bool errno_set = false;
2035 bool try_widening = false;
2036 tree arg;
2037
2038 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2039 return NULL_RTX;
2040
2041 arg = CALL_EXPR_ARG (exp, 0);
2042
2043 switch (DECL_FUNCTION_CODE (fndecl))
2044 {
2045 CASE_FLT_FN (BUILT_IN_SQRT):
2046 errno_set = ! tree_expr_nonnegative_p (arg);
2047 try_widening = true;
2048 builtin_optab = sqrt_optab;
2049 break;
2050 CASE_FLT_FN (BUILT_IN_EXP):
2051 errno_set = true; builtin_optab = exp_optab; break;
2052 CASE_FLT_FN (BUILT_IN_EXP10):
2053 CASE_FLT_FN (BUILT_IN_POW10):
2054 errno_set = true; builtin_optab = exp10_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXP2):
2056 errno_set = true; builtin_optab = exp2_optab; break;
2057 CASE_FLT_FN (BUILT_IN_EXPM1):
2058 errno_set = true; builtin_optab = expm1_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOGB):
2060 errno_set = true; builtin_optab = logb_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG):
2062 errno_set = true; builtin_optab = log_optab; break;
2063 CASE_FLT_FN (BUILT_IN_LOG10):
2064 errno_set = true; builtin_optab = log10_optab; break;
2065 CASE_FLT_FN (BUILT_IN_LOG2):
2066 errno_set = true; builtin_optab = log2_optab; break;
2067 CASE_FLT_FN (BUILT_IN_LOG1P):
2068 errno_set = true; builtin_optab = log1p_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ASIN):
2070 builtin_optab = asin_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ACOS):
2072 builtin_optab = acos_optab; break;
2073 CASE_FLT_FN (BUILT_IN_TAN):
2074 builtin_optab = tan_optab; break;
2075 CASE_FLT_FN (BUILT_IN_ATAN):
2076 builtin_optab = atan_optab; break;
2077 CASE_FLT_FN (BUILT_IN_FLOOR):
2078 builtin_optab = floor_optab; break;
2079 CASE_FLT_FN (BUILT_IN_CEIL):
2080 builtin_optab = ceil_optab; break;
2081 CASE_FLT_FN (BUILT_IN_TRUNC):
2082 builtin_optab = btrunc_optab; break;
2083 CASE_FLT_FN (BUILT_IN_ROUND):
2084 builtin_optab = round_optab; break;
2085 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2086 builtin_optab = nearbyint_optab;
2087 if (flag_trapping_math)
2088 break;
2089 /* Else fall through and expand as rint. */
2090 CASE_FLT_FN (BUILT_IN_RINT):
2091 builtin_optab = rint_optab; break;
2092 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2093 builtin_optab = significand_optab; break;
2094 default:
2095 gcc_unreachable ();
2096 }
2097
2098 /* Make a suitable register to place result in. */
2099 mode = TYPE_MODE (TREE_TYPE (exp));
2100
2101 if (! flag_errno_math || ! HONOR_NANS (mode))
2102 errno_set = false;
2103
2104 /* Before working hard, check whether the instruction is available, but try
2105 to widen the mode for specific operations. */
2106 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2107 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2108 && (!errno_set || !optimize_insn_for_size_p ()))
2109 {
2110 rtx result = gen_reg_rtx (mode);
2111
2112 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2113 need to expand the argument again. This way, we will not perform
2114 side effects more than once. */
2115 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2116
2117 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118
2119 start_sequence ();
2120
2121 /* Compute into RESULT.
2122 Set RESULT to wherever the result comes back. */
2123 result = expand_unop (mode, builtin_optab, op0, result, 0);
2124
2125 if (result != 0)
2126 {
2127 if (errno_set)
2128 expand_errno_check (exp, result);
2129
2130 /* Output the entire sequence. */
2131 insns = get_insns ();
2132 end_sequence ();
2133 emit_insn (insns);
2134 return result;
2135 }
2136
2137 /* If we were unable to expand via the builtin, stop the sequence
2138 (without outputting the insns) and call the library function
2139 with the stabilized argument list. */
2140 end_sequence ();
2141 }
2142
2143 return expand_call (exp, target, target == const0_rtx);
2144 }
2145
2146 /* Expand a call to the builtin binary math functions (pow and atan2).
2147 Return NULL_RTX if a normal call should be emitted rather than expanding the
2148 function in-line. EXP is the expression that is a call to the builtin
2149 function; if convenient, the result should be placed in TARGET.
2150 SUBTARGET may be used as the target for computing one of EXP's
2151 operands. */
2152
2153 static rtx
2154 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2155 {
2156 optab builtin_optab;
2157 rtx op0, op1, result;
2158 rtx_insn *insns;
2159 int op1_type = REAL_TYPE;
2160 tree fndecl = get_callee_fndecl (exp);
2161 tree arg0, arg1;
2162 machine_mode mode;
2163 bool errno_set = true;
2164
2165 switch (DECL_FUNCTION_CODE (fndecl))
2166 {
2167 CASE_FLT_FN (BUILT_IN_SCALBN):
2168 CASE_FLT_FN (BUILT_IN_SCALBLN):
2169 CASE_FLT_FN (BUILT_IN_LDEXP):
2170 op1_type = INTEGER_TYPE;
2171 default:
2172 break;
2173 }
2174
2175 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2176 return NULL_RTX;
2177
2178 arg0 = CALL_EXPR_ARG (exp, 0);
2179 arg1 = CALL_EXPR_ARG (exp, 1);
2180
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 CASE_FLT_FN (BUILT_IN_POW):
2184 builtin_optab = pow_optab; break;
2185 CASE_FLT_FN (BUILT_IN_ATAN2):
2186 builtin_optab = atan2_optab; break;
2187 CASE_FLT_FN (BUILT_IN_SCALB):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2189 return 0;
2190 builtin_optab = scalb_optab; break;
2191 CASE_FLT_FN (BUILT_IN_SCALBN):
2192 CASE_FLT_FN (BUILT_IN_SCALBLN):
2193 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2194 return 0;
2195 /* Fall through... */
2196 CASE_FLT_FN (BUILT_IN_LDEXP):
2197 builtin_optab = ldexp_optab; break;
2198 CASE_FLT_FN (BUILT_IN_FMOD):
2199 builtin_optab = fmod_optab; break;
2200 CASE_FLT_FN (BUILT_IN_REMAINDER):
2201 CASE_FLT_FN (BUILT_IN_DREM):
2202 builtin_optab = remainder_optab; break;
2203 default:
2204 gcc_unreachable ();
2205 }
2206
2207 /* Make a suitable register to place result in. */
2208 mode = TYPE_MODE (TREE_TYPE (exp));
2209
2210 /* Before working hard, check whether the instruction is available. */
2211 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2212 return NULL_RTX;
2213
2214 result = gen_reg_rtx (mode);
2215
2216 if (! flag_errno_math || ! HONOR_NANS (mode))
2217 errno_set = false;
2218
2219 if (errno_set && optimize_insn_for_size_p ())
2220 return 0;
2221
2222 /* Always stabilize the argument list. */
2223 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2224 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2225
2226 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2227 op1 = expand_normal (arg1);
2228
2229 start_sequence ();
2230
2231 /* Compute into RESULT.
2232 Set RESULT to wherever the result comes back. */
2233 result = expand_binop (mode, builtin_optab, op0, op1,
2234 result, 0, OPTAB_DIRECT);
2235
2236 /* If we were unable to expand via the builtin, stop the sequence
2237 (without outputting the insns) and call the library function
2238 with the stabilized argument list. */
2239 if (result == 0)
2240 {
2241 end_sequence ();
2242 return expand_call (exp, target, target == const0_rtx);
2243 }
2244
2245 if (errno_set)
2246 expand_errno_check (exp, result);
2247
2248 /* Output the entire sequence. */
2249 insns = get_insns ();
2250 end_sequence ();
2251 emit_insn (insns);
2252
2253 return result;
2254 }
2255
2256 /* Expand a call to the builtin ternary math functions (fma).
2257 Return NULL_RTX if a normal call should be emitted rather than expanding the
2258 function in-line. EXP is the expression that is a call to the builtin
2259 function; if convenient, the result should be placed in TARGET.
2260 SUBTARGET may be used as the target for computing one of EXP's
2261 operands. */
2262
2263 static rtx
2264 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2265 {
2266 optab builtin_optab;
2267 rtx op0, op1, op2, result;
2268 rtx_insn *insns;
2269 tree fndecl = get_callee_fndecl (exp);
2270 tree arg0, arg1, arg2;
2271 machine_mode mode;
2272
2273 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2274 return NULL_RTX;
2275
2276 arg0 = CALL_EXPR_ARG (exp, 0);
2277 arg1 = CALL_EXPR_ARG (exp, 1);
2278 arg2 = CALL_EXPR_ARG (exp, 2);
2279
2280 switch (DECL_FUNCTION_CODE (fndecl))
2281 {
2282 CASE_FLT_FN (BUILT_IN_FMA):
2283 builtin_optab = fma_optab; break;
2284 default:
2285 gcc_unreachable ();
2286 }
2287
2288 /* Make a suitable register to place result in. */
2289 mode = TYPE_MODE (TREE_TYPE (exp));
2290
2291 /* Before working hard, check whether the instruction is available. */
2292 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2293 return NULL_RTX;
2294
2295 result = gen_reg_rtx (mode);
2296
2297 /* Always stabilize the argument list. */
2298 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2299 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2300 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2301
2302 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2303 op1 = expand_normal (arg1);
2304 op2 = expand_normal (arg2);
2305
2306 start_sequence ();
2307
2308 /* Compute into RESULT.
2309 Set RESULT to wherever the result comes back. */
2310 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2311 result, 0);
2312
2313 /* If we were unable to expand via the builtin, stop the sequence
2314 (without outputting the insns) and call the library function
2315 with the stabilized argument list. */
2316 if (result == 0)
2317 {
2318 end_sequence ();
2319 return expand_call (exp, target, target == const0_rtx);
2320 }
2321
2322 /* Output the entire sequence. */
2323 insns = get_insns ();
2324 end_sequence ();
2325 emit_insn (insns);
2326
2327 return result;
2328 }
2329
2330 /* Expand a call to the builtin sin and cos math functions.
2331 Return NULL_RTX if a normal call should be emitted rather than expanding the
2332 function in-line. EXP is the expression that is a call to the builtin
2333 function; if convenient, the result should be placed in TARGET.
2334 SUBTARGET may be used as the target for computing one of EXP's
2335 operands. */
2336
2337 static rtx
2338 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2339 {
2340 optab builtin_optab;
2341 rtx op0;
2342 rtx_insn *insns;
2343 tree fndecl = get_callee_fndecl (exp);
2344 machine_mode mode;
2345 tree arg;
2346
2347 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2348 return NULL_RTX;
2349
2350 arg = CALL_EXPR_ARG (exp, 0);
2351
2352 switch (DECL_FUNCTION_CODE (fndecl))
2353 {
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 CASE_FLT_FN (BUILT_IN_COS):
2356 builtin_optab = sincos_optab; break;
2357 default:
2358 gcc_unreachable ();
2359 }
2360
2361 /* Make a suitable register to place result in. */
2362 mode = TYPE_MODE (TREE_TYPE (exp));
2363
2364 /* Check if the sincos insn is available; otherwise fall back
2365 to the sin or cos insn. */
2366 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2367 switch (DECL_FUNCTION_CODE (fndecl))
2368 {
2369 CASE_FLT_FN (BUILT_IN_SIN):
2370 builtin_optab = sin_optab; break;
2371 CASE_FLT_FN (BUILT_IN_COS):
2372 builtin_optab = cos_optab; break;
2373 default:
2374 gcc_unreachable ();
2375 }
2376
2377 /* Before working hard, check whether the instruction is available. */
2378 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2379 {
2380 rtx result = gen_reg_rtx (mode);
2381
2382 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2383 need to expand the argument again. This way, we will not perform
2384 side effects more than once. */
2385 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2386
2387 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2388
2389 start_sequence ();
2390
2391 /* Compute into RESULT.
2392 Set RESULT to wherever the result comes back. */
2393 if (builtin_optab == sincos_optab)
2394 {
2395 int ok;
2396
2397 switch (DECL_FUNCTION_CODE (fndecl))
2398 {
2399 CASE_FLT_FN (BUILT_IN_SIN):
2400 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2401 break;
2402 CASE_FLT_FN (BUILT_IN_COS):
2403 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2404 break;
2405 default:
2406 gcc_unreachable ();
2407 }
2408 gcc_assert (ok);
2409 }
2410 else
2411 result = expand_unop (mode, builtin_optab, op0, result, 0);
2412
2413 if (result != 0)
2414 {
2415 /* Output the entire sequence. */
2416 insns = get_insns ();
2417 end_sequence ();
2418 emit_insn (insns);
2419 return result;
2420 }
2421
2422 /* If we were unable to expand via the builtin, stop the sequence
2423 (without outputting the insns) and call the library function
2424 with the stabilized argument list. */
2425 end_sequence ();
2426 }
2427
2428 return expand_call (exp, target, target == const0_rtx);
2429 }
2430
2431 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2432 return an RTL instruction code that implements the functionality.
2433 If that isn't possible or available return CODE_FOR_nothing. */
2434
2435 static enum insn_code
2436 interclass_mathfn_icode (tree arg, tree fndecl)
2437 {
2438 bool errno_set = false;
2439 optab builtin_optab = unknown_optab;
2440 machine_mode mode;
2441
2442 switch (DECL_FUNCTION_CODE (fndecl))
2443 {
2444 CASE_FLT_FN (BUILT_IN_ILOGB):
2445 errno_set = true; builtin_optab = ilogb_optab; break;
2446 CASE_FLT_FN (BUILT_IN_ISINF):
2447 builtin_optab = isinf_optab; break;
2448 case BUILT_IN_ISNORMAL:
2449 case BUILT_IN_ISFINITE:
2450 CASE_FLT_FN (BUILT_IN_FINITE):
2451 case BUILT_IN_FINITED32:
2452 case BUILT_IN_FINITED64:
2453 case BUILT_IN_FINITED128:
2454 case BUILT_IN_ISINFD32:
2455 case BUILT_IN_ISINFD64:
2456 case BUILT_IN_ISINFD128:
2457 /* These builtins have no optabs (yet). */
2458 break;
2459 default:
2460 gcc_unreachable ();
2461 }
2462
2463 /* There's no easy way to detect the case we need to set EDOM. */
2464 if (flag_errno_math && errno_set)
2465 return CODE_FOR_nothing;
2466
2467 /* Optab mode depends on the mode of the input argument. */
2468 mode = TYPE_MODE (TREE_TYPE (arg));
2469
2470 if (builtin_optab)
2471 return optab_handler (builtin_optab, mode);
2472 return CODE_FOR_nothing;
2473 }
2474
2475 /* Expand a call to one of the builtin math functions that operate on
2476 floating point argument and output an integer result (ilogb, isinf,
2477 isnan, etc).
2478 Return 0 if a normal call should be emitted rather than expanding the
2479 function in-line. EXP is the expression that is a call to the builtin
2480 function; if convenient, the result should be placed in TARGET. */
2481
2482 static rtx
2483 expand_builtin_interclass_mathfn (tree exp, rtx target)
2484 {
2485 enum insn_code icode = CODE_FOR_nothing;
2486 rtx op0;
2487 tree fndecl = get_callee_fndecl (exp);
2488 machine_mode mode;
2489 tree arg;
2490
2491 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2492 return NULL_RTX;
2493
2494 arg = CALL_EXPR_ARG (exp, 0);
2495 icode = interclass_mathfn_icode (arg, fndecl);
2496 mode = TYPE_MODE (TREE_TYPE (arg));
2497
2498 if (icode != CODE_FOR_nothing)
2499 {
2500 struct expand_operand ops[1];
2501 rtx_insn *last = get_last_insn ();
2502 tree orig_arg = arg;
2503
2504 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2505 need to expand the argument again. This way, we will not perform
2506 side effects more than once. */
2507 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2508
2509 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2510
2511 if (mode != GET_MODE (op0))
2512 op0 = convert_to_mode (mode, op0, 0);
2513
2514 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2515 if (maybe_legitimize_operands (icode, 0, 1, ops)
2516 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2517 return ops[0].value;
2518
2519 delete_insns_since (last);
2520 CALL_EXPR_ARG (exp, 0) = orig_arg;
2521 }
2522
2523 return NULL_RTX;
2524 }
2525
2526 /* Expand a call to the builtin sincos math function.
2527 Return NULL_RTX if a normal call should be emitted rather than expanding the
2528 function in-line. EXP is the expression that is a call to the builtin
2529 function. */
2530
2531 static rtx
2532 expand_builtin_sincos (tree exp)
2533 {
2534 rtx op0, op1, op2, target1, target2;
2535 machine_mode mode;
2536 tree arg, sinp, cosp;
2537 int result;
2538 location_t loc = EXPR_LOCATION (exp);
2539 tree alias_type, alias_off;
2540
2541 if (!validate_arglist (exp, REAL_TYPE,
2542 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2543 return NULL_RTX;
2544
2545 arg = CALL_EXPR_ARG (exp, 0);
2546 sinp = CALL_EXPR_ARG (exp, 1);
2547 cosp = CALL_EXPR_ARG (exp, 2);
2548
2549 /* Make a suitable register to place result in. */
2550 mode = TYPE_MODE (TREE_TYPE (arg));
2551
2552 /* Check if sincos insn is available, otherwise emit the call. */
2553 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2554 return NULL_RTX;
2555
2556 target1 = gen_reg_rtx (mode);
2557 target2 = gen_reg_rtx (mode);
2558
2559 op0 = expand_normal (arg);
2560 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2561 alias_off = build_int_cst (alias_type, 0);
2562 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 sinp, alias_off));
2564 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2565 cosp, alias_off));
2566
2567 /* Compute into target1 and target2.
2568 Set TARGET to wherever the result comes back. */
2569 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2570 gcc_assert (result);
2571
2572 /* Move target1 and target2 to the memory locations indicated
2573 by op1 and op2. */
2574 emit_move_insn (op1, target1);
2575 emit_move_insn (op2, target2);
2576
2577 return const0_rtx;
2578 }
2579
2580 /* Expand a call to the internal cexpi builtin to the sincos math function.
2581 EXP is the expression that is a call to the builtin function; if convenient,
2582 the result should be placed in TARGET. */
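/* Conceptually the lowering performed below is

       __builtin_cexpi (x)  ->  sincos (x, &s, &c), result c + s*I

   or, when no sincos is available, a call to cexp (0 + x*I).  */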
2583
2584 static rtx
2585 expand_builtin_cexpi (tree exp, rtx target)
2586 {
2587 tree fndecl = get_callee_fndecl (exp);
2588 tree arg, type;
2589 machine_mode mode;
2590 rtx op0, op1, op2;
2591 location_t loc = EXPR_LOCATION (exp);
2592
2593 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2594 return NULL_RTX;
2595
2596 arg = CALL_EXPR_ARG (exp, 0);
2597 type = TREE_TYPE (arg);
2598 mode = TYPE_MODE (TREE_TYPE (arg));
2599
2600 /* Try expanding via a sincos optab, fall back to emitting a libcall
2601 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2602 is only generated from sincos or cexp, or when either of them is available. */
2603 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2604 {
2605 op1 = gen_reg_rtx (mode);
2606 op2 = gen_reg_rtx (mode);
2607
2608 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2609
2610 /* Compute into op1 and op2. */
2611 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2612 }
2613 else if (targetm.libc_has_function (function_sincos))
2614 {
2615 tree call, fn = NULL_TREE;
2616 tree top1, top2;
2617 rtx op1a, op2a;
2618
2619 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2624 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2625 else
2626 gcc_unreachable ();
2627
2628 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2629 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2630 op1a = copy_addr_to_reg (XEXP (op1, 0));
2631 op2a = copy_addr_to_reg (XEXP (op2, 0));
2632 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2633 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2634
2635 /* Make sure not to fold the sincos call again. */
2636 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2637 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2638 call, 3, arg, top1, top2));
2639 }
2640 else
2641 {
2642 tree call, fn = NULL_TREE, narg;
2643 tree ctype = build_complex_type (type);
2644
2645 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2649 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2650 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2651 else
2652 gcc_unreachable ();
2653
2654 /* If we don't have a decl for cexp create one. This is the
2655 friendliest fallback if the user calls __builtin_cexpi
2656 on a target without full C99 function support. */
2657 if (fn == NULL_TREE)
2658 {
2659 tree fntype;
2660 const char *name = NULL;
2661
2662 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2663 name = "cexpf";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2665 name = "cexp";
2666 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2667 name = "cexpl";
2668
2669 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2670 fn = build_fn_decl (name, fntype);
2671 }
2672
2673 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2674 build_real (type, dconst0), arg);
2675
2676 /* Make sure not to fold the cexp call again. */
2677 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2678 return expand_expr (build_call_nary (ctype, call, 1, narg),
2679 target, VOIDmode, EXPAND_NORMAL);
2680 }
2681
2682 /* Now build the proper return type. */
2683 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2684 make_tree (TREE_TYPE (arg), op2),
2685 make_tree (TREE_TYPE (arg), op1)),
2686 target, VOIDmode, EXPAND_NORMAL);
2687 }
2688
2689 /* Conveniently construct a function call expression. FNDECL names the
2690 function to be called, N is the number of arguments, and the "..."
2691 parameters are the argument expressions. Unlike build_call_expr,
2692 this doesn't fold the call; hence it will always return a CALL_EXPR. */
2693
2694 static tree
2695 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2696 {
2697 va_list ap;
2698 tree fntype = TREE_TYPE (fndecl);
2699 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2700
2701 va_start (ap, n);
2702 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2703 va_end (ap);
2704 SET_EXPR_LOCATION (fn, loc);
2705 return fn;
2706 }
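/* A typical use, as seen later in this file:

       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                    fallback_fndecl, 1, arg);

   builds an unfolded CALL_EXPR invoking FALLBACK_FNDECL on ARG.  */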
2707
2708 /* Expand a call to one of the builtin rounding functions gcc defines
2709 as an extension (lfloor and lceil). As these are gcc extensions we
2710 do not need to worry about setting errno to EDOM.
2711 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2712 EXP is the expression that is a call to the builtin function;
2713 if convenient, the result should be placed in TARGET. */
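/* For example, when no lfloor optab entry exists for the mode,
   __builtin_lfloor (x) is expanded roughly as (long) floor (x):
   a call to the floor variant followed by expand_fix.  */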
2714
2715 static rtx
2716 expand_builtin_int_roundingfn (tree exp, rtx target)
2717 {
2718 convert_optab builtin_optab;
2719 rtx op0, tmp;
2720 rtx_insn *insns;
2721 tree fndecl = get_callee_fndecl (exp);
2722 enum built_in_function fallback_fn;
2723 tree fallback_fndecl;
2724 machine_mode mode;
2725 tree arg;
2726
2727 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2728 gcc_unreachable ();
2729
2730 arg = CALL_EXPR_ARG (exp, 0);
2731
2732 switch (DECL_FUNCTION_CODE (fndecl))
2733 {
2734 CASE_FLT_FN (BUILT_IN_ICEIL):
2735 CASE_FLT_FN (BUILT_IN_LCEIL):
2736 CASE_FLT_FN (BUILT_IN_LLCEIL):
2737 builtin_optab = lceil_optab;
2738 fallback_fn = BUILT_IN_CEIL;
2739 break;
2740
2741 CASE_FLT_FN (BUILT_IN_IFLOOR):
2742 CASE_FLT_FN (BUILT_IN_LFLOOR):
2743 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2744 builtin_optab = lfloor_optab;
2745 fallback_fn = BUILT_IN_FLOOR;
2746 break;
2747
2748 default:
2749 gcc_unreachable ();
2750 }
2751
2752 /* Make a suitable register to place result in. */
2753 mode = TYPE_MODE (TREE_TYPE (exp));
2754
2755 target = gen_reg_rtx (mode);
2756
2757 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2758 need to expand the argument again. This way, we will not perform
2759 side effects more than once. */
2760 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2761
2762 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2763
2764 start_sequence ();
2765
2766 /* Compute into TARGET. */
2767 if (expand_sfix_optab (target, op0, builtin_optab))
2768 {
2769 /* Output the entire sequence. */
2770 insns = get_insns ();
2771 end_sequence ();
2772 emit_insn (insns);
2773 return target;
2774 }
2775
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns). */
2778 end_sequence ();
2779
2780 /* Fall back to floating point rounding optab. */
2781 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2782
2783 /* For non-C99 targets we may end up without a fallback fndecl here
2784 if the user called __builtin_lfloor directly. In this case emit
2785 a call to the floor/ceil variants nevertheless. This should result
2786 in the best user experience for targets without full C99 support. */
2787 if (fallback_fndecl == NULL_TREE)
2788 {
2789 tree fntype;
2790 const char *name = NULL;
2791
2792 switch (DECL_FUNCTION_CODE (fndecl))
2793 {
2794 case BUILT_IN_ICEIL:
2795 case BUILT_IN_LCEIL:
2796 case BUILT_IN_LLCEIL:
2797 name = "ceil";
2798 break;
2799 case BUILT_IN_ICEILF:
2800 case BUILT_IN_LCEILF:
2801 case BUILT_IN_LLCEILF:
2802 name = "ceilf";
2803 break;
2804 case BUILT_IN_ICEILL:
2805 case BUILT_IN_LCEILL:
2806 case BUILT_IN_LLCEILL:
2807 name = "ceill";
2808 break;
2809 case BUILT_IN_IFLOOR:
2810 case BUILT_IN_LFLOOR:
2811 case BUILT_IN_LLFLOOR:
2812 name = "floor";
2813 break;
2814 case BUILT_IN_IFLOORF:
2815 case BUILT_IN_LFLOORF:
2816 case BUILT_IN_LLFLOORF:
2817 name = "floorf";
2818 break;
2819 case BUILT_IN_IFLOORL:
2820 case BUILT_IN_LFLOORL:
2821 case BUILT_IN_LLFLOORL:
2822 name = "floorl";
2823 break;
2824 default:
2825 gcc_unreachable ();
2826 }
2827
2828 fntype = build_function_type_list (TREE_TYPE (arg),
2829 TREE_TYPE (arg), NULL_TREE);
2830 fallback_fndecl = build_fn_decl (name, fntype);
2831 }
2832
2833 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2834
2835 tmp = expand_normal (exp);
2836 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2837
2838 /* Truncate the result of floating point optab to integer
2839 via expand_fix (). */
2840 target = gen_reg_rtx (mode);
2841 expand_fix (target, tmp, 0);
2842
2843 return target;
2844 }
2845
2846 /* Expand a call to one of the builtin math functions doing integer
2847 conversion (lrint).
2848 Return 0 if a normal call should be emitted rather than expanding the
2849 function in-line. EXP is the expression that is a call to the builtin
2850 function; if convenient, the result should be placed in TARGET. */
2851
2852 static rtx
2853 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2854 {
2855 convert_optab builtin_optab;
2856 rtx op0;
2857 rtx_insn *insns;
2858 tree fndecl = get_callee_fndecl (exp);
2859 tree arg;
2860 machine_mode mode;
2861 enum built_in_function fallback_fn = BUILT_IN_NONE;
2862
2863 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2864 gcc_unreachable ();
2865
2866 arg = CALL_EXPR_ARG (exp, 0);
2867
2868 switch (DECL_FUNCTION_CODE (fndecl))
2869 {
2870 CASE_FLT_FN (BUILT_IN_IRINT):
2871 fallback_fn = BUILT_IN_LRINT;
2872 /* FALLTHRU */
2873 CASE_FLT_FN (BUILT_IN_LRINT):
2874 CASE_FLT_FN (BUILT_IN_LLRINT):
2875 builtin_optab = lrint_optab;
2876 break;
2877
2878 CASE_FLT_FN (BUILT_IN_IROUND):
2879 fallback_fn = BUILT_IN_LROUND;
2880 /* FALLTHRU */
2881 CASE_FLT_FN (BUILT_IN_LROUND):
2882 CASE_FLT_FN (BUILT_IN_LLROUND):
2883 builtin_optab = lround_optab;
2884 break;
2885
2886 default:
2887 gcc_unreachable ();
2888 }
2889
2890 /* There's no easy way to detect the case we need to set EDOM. */
2891 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2892 return NULL_RTX;
2893
2894 /* Make a suitable register to place result in. */
2895 mode = TYPE_MODE (TREE_TYPE (exp));
2896
2897 /* There's no easy way to detect the case we need to set EDOM, so expand inline only when errno is not maintained. */
2898 if (!flag_errno_math)
2899 {
2900 rtx result = gen_reg_rtx (mode);
2901
2902 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2903 need to expand the argument again. This way, we will not perform
2904 side effects more than once. */
2905 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2906
2907 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2908
2909 start_sequence ();
2910
2911 if (expand_sfix_optab (result, op0, builtin_optab))
2912 {
2913 /* Output the entire sequence. */
2914 insns = get_insns ();
2915 end_sequence ();
2916 emit_insn (insns);
2917 return result;
2918 }
2919
2920 /* If we were unable to expand via the builtin, stop the sequence
2921 (without outputting the insns) and call the library function
2922 with the stabilized argument list. */
2923 end_sequence ();
2924 }
2925
2926 if (fallback_fn != BUILT_IN_NONE)
2927 {
2928 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2929 targets, (int) round (x) should never be transformed into
2930 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2931 a call to lround in the hope that the target provides at least some
2932 C99 functions. This should result in the best user experience for
2933 targets without full C99 support. */
2934 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2935 fallback_fn, 0);
2936
2937 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2938 fallback_fndecl, 1, arg);
2939
2940 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2941 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2942 return convert_to_mode (mode, target, 0);
2943 }
2944
2945 return expand_call (exp, target, target == const0_rtx);
2946 }
2947
2948 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2949 a normal call should be emitted rather than expanding the function
2950 in-line. EXP is the expression that is a call to the builtin
2951 function; if convenient, the result should be placed in TARGET. */
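/* The libcall target comes from optab_libfunc (powi_optab, mode);
   for DFmode this is normally libgcc's __powidf2, though the exact
   symbol is target and mode dependent.  */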
2952
2953 static rtx
2954 expand_builtin_powi (tree exp, rtx target)
2955 {
2956 tree arg0, arg1;
2957 rtx op0, op1;
2958 machine_mode mode;
2959 machine_mode mode2;
2960
2961 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2962 return NULL_RTX;
2963
2964 arg0 = CALL_EXPR_ARG (exp, 0);
2965 arg1 = CALL_EXPR_ARG (exp, 1);
2966 mode = TYPE_MODE (TREE_TYPE (exp));
2967
2968 /* Emit a libcall to libgcc. */
2969
2970 /* Mode of the 2nd argument must match that of an int. */
2971 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2972
2973 if (target == NULL_RTX)
2974 target = gen_reg_rtx (mode);
2975
2976 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2977 if (GET_MODE (op0) != mode)
2978 op0 = convert_to_mode (mode, op0, 0);
2979 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2980 if (GET_MODE (op1) != mode2)
2981 op1 = convert_to_mode (mode2, op1, 0);
2982
2983 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2984 target, LCT_CONST, mode, 2,
2985 op0, mode, op1, mode2);
2986
2987 return target;
2988 }
2989
2990 /* Expand expression EXP which is a call to the strlen builtin. Return
2991 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2992 try to get the result in TARGET, if convenient. */
2993
2994 static rtx
2995 expand_builtin_strlen (tree exp, rtx target,
2996 machine_mode target_mode)
2997 {
2998 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3000 else
3001 {
3002 struct expand_operand ops[4];
3003 rtx pat;
3004 tree len;
3005 tree src = CALL_EXPR_ARG (exp, 0);
3006 rtx src_reg;
3007 rtx_insn *before_strlen;
3008 machine_mode insn_mode = target_mode;
3009 enum insn_code icode = CODE_FOR_nothing;
3010 unsigned int align;
3011
3012 /* If the length can be computed at compile-time, return it. */
3013 len = c_strlen (src, 0);
3014 if (len)
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016
3017 /* If the length can be computed at compile-time and is a constant
3018 integer, but there are side-effects in src, evaluate
3019 src for side-effects, then return len.
3020 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3021 can be optimized into: i++; x = 3; */
3022 len = c_strlen (src, 1);
3023 if (len && TREE_CODE (len) == INTEGER_CST)
3024 {
3025 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3026 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3027 }
3028
3029 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3030
3031 /* If SRC is not a pointer type, don't do this operation inline. */
3032 if (align == 0)
3033 return NULL_RTX;
3034
3035 /* Bail out if we can't compute strlen in the right mode. */
3036 while (insn_mode != VOIDmode)
3037 {
3038 icode = optab_handler (strlen_optab, insn_mode);
3039 if (icode != CODE_FOR_nothing)
3040 break;
3041
3042 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3043 }
3044 if (insn_mode == VOIDmode)
3045 return NULL_RTX;
3046
3047 /* Make a place to hold the source address. We will not expand
3048 the actual source until we are sure that the expansion will
3049 not fail -- there are trees that cannot be expanded twice. */
3050 src_reg = gen_reg_rtx (Pmode);
3051
3052 /* Mark the beginning of the strlen sequence so we can emit the
3053 source operand later. */
3054 before_strlen = get_last_insn ();
3055
3056 create_output_operand (&ops[0], target, insn_mode);
3057 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3058 create_integer_operand (&ops[2], 0);
3059 create_integer_operand (&ops[3], align);
3060 if (!maybe_expand_insn (icode, 4, ops))
3061 return NULL_RTX;
3062
3063 /* Now that we are assured of success, expand the source. */
3064 start_sequence ();
3065 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3066 if (pat != src_reg)
3067 {
3068 #ifdef POINTERS_EXTEND_UNSIGNED
3069 if (GET_MODE (pat) != Pmode)
3070 pat = convert_to_mode (Pmode, pat,
3071 POINTERS_EXTEND_UNSIGNED);
3072 #endif
3073 emit_move_insn (src_reg, pat);
3074 }
3075 pat = get_insns ();
3076 end_sequence ();
3077
3078 if (before_strlen)
3079 emit_insn_after (pat, before_strlen);
3080 else
3081 emit_insn_before (pat, get_insns ());
3082
3083 /* Return the value in the proper mode for this function. */
3084 if (GET_MODE (ops[0].value) == target_mode)
3085 target = ops[0].value;
3086 else if (target != 0)
3087 convert_move (target, ops[0].value, 0);
3088 else
3089 target = convert_to_mode (target_mode, ops[0].value, 0);
3090
3091 return target;
3092 }
3093 }
3094
3095 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3096 bytes from constant string DATA + OFFSET and return it as target
3097 constant. */
3098
3099 static rtx
3100 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3101 machine_mode mode)
3102 {
3103 const char *str = (const char *) data;
3104
3105 gcc_assert (offset >= 0
3106 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3107 <= strlen (str) + 1));
3108
3109 return c_readstr (str + offset, mode);
3110 }
3111
3112 /* LEN specifies the length of the block for a memcpy/memset operation.
3113 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3114 In some cases we can make a very likely guess at the maximum size,
3115 which we then put into PROBABLE_MAX_SIZE. */
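/* For instance, given

       if (n < 100)
         memcpy (a, b, n);

   where N is an SSA name with value range [0, 99], this sets
   *MIN_SIZE to 0 and *MAX_SIZE == *PROBABLE_MAX_SIZE to 99.  */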
3116
3117 static void
3118 determine_block_size (tree len, rtx len_rtx,
3119 unsigned HOST_WIDE_INT *min_size,
3120 unsigned HOST_WIDE_INT *max_size,
3121 unsigned HOST_WIDE_INT *probable_max_size)
3122 {
3123 if (CONST_INT_P (len_rtx))
3124 {
3125 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3126 return;
3127 }
3128 else
3129 {
3130 wide_int min, max;
3131 enum value_range_type range_type = VR_UNDEFINED;
3132
3133 /* Determine bounds from the type. */
3134 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3135 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3136 else
3137 *min_size = 0;
3138 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3139 *probable_max_size = *max_size
3140 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3141 else
3142 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3143
3144 if (TREE_CODE (len) == SSA_NAME)
3145 range_type = get_range_info (len, &min, &max);
3146 if (range_type == VR_RANGE)
3147 {
3148 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3149 *min_size = min.to_uhwi ();
3150 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3151 *probable_max_size = *max_size = max.to_uhwi ();
3152 }
3153 else if (range_type == VR_ANTI_RANGE)
3154 {
3155 /* An anti-range 0...N lets us raise the minimal size to N+1. */
3156 if (min == 0)
3157 {
3158 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3159 *min_size = max.to_uhwi () + 1;
3160 }
3161 /* Code like
3162
3163 int n;
3164 if (n < 100)
3165 memcpy (a, b, n)
3166
3167 produces an anti-range allowing negative values of N. We can
3168 still use this information and guess that N is not negative.
3169 */
3170 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3171 *probable_max_size = min.to_uhwi () - 1;
3172 }
3173 }
3174 gcc_checking_assert (*max_size <=
3175 (unsigned HOST_WIDE_INT)
3176 GET_MODE_MASK (GET_MODE (len_rtx)));
3177 }
3178
3179 /* Helper function to do the actual work for expand_builtin_memcpy. */
3180
3181 static rtx
3182 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3183 {
3184 const char *src_str;
3185 unsigned int src_align = get_pointer_alignment (src);
3186 unsigned int dest_align = get_pointer_alignment (dest);
3187 rtx dest_mem, src_mem, dest_addr, len_rtx;
3188 HOST_WIDE_INT expected_size = -1;
3189 unsigned int expected_align = 0;
3190 unsigned HOST_WIDE_INT min_size;
3191 unsigned HOST_WIDE_INT max_size;
3192 unsigned HOST_WIDE_INT probable_max_size;
3193
3194 /* If DEST is not a pointer type, call the normal function. */
3195 if (dest_align == 0)
3196 return NULL_RTX;
3197
3198 /* If SRC is not a pointer type, don't do this
3199 operation in-line. */
3200 if (src_align == 0)
3201 return NULL_RTX;
3202
3203 if (currently_expanding_gimple_stmt)
3204 stringop_block_profile (currently_expanding_gimple_stmt,
3205 &expected_align, &expected_size);
3206
3207 if (expected_align < dest_align)
3208 expected_align = dest_align;
3209 dest_mem = get_memory_rtx (dest, len);
3210 set_mem_align (dest_mem, dest_align);
3211 len_rtx = expand_normal (len);
3212 determine_block_size (len, len_rtx, &min_size, &max_size,
3213 &probable_max_size);
3214 src_str = c_getstr (src);
3215
3216 /* If SRC is a string constant and block move would be done
3217 by pieces, we can avoid loading the string from memory
3218 and only store the computed constants. */
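/* E.g. memcpy (dst, "ab", 3) can become immediate stores of the
   bytes 'a', 'b' and '\0' (possibly one wider constant store), with
   no load from the literal's memory image.  */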
3219 if (src_str
3220 && CONST_INT_P (len_rtx)
3221 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3222 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3223 CONST_CAST (char *, src_str),
3224 dest_align, false))
3225 {
3226 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3227 builtin_memcpy_read_str,
3228 CONST_CAST (char *, src_str),
3229 dest_align, false, 0);
3230 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3231 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3232 return dest_mem;
3233 }
3234
3235 src_mem = get_memory_rtx (src, len);
3236 set_mem_align (src_mem, src_align);
3237
3238 /* Copy word part most expediently. */
3239 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3240 CALL_EXPR_TAILCALL (exp)
3241 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3242 expected_align, expected_size,
3243 min_size, max_size, probable_max_size);
3244
3245 if (dest_addr == 0)
3246 {
3247 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3248 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3249 }
3250
3251 return dest_addr;
3252 }
3253
3254 /* Expand a call EXP to the memcpy builtin.
3255 Return NULL_RTX if we failed, the caller should emit a normal call,
3256 otherwise try to get the result in TARGET, if convenient (and in
3257 mode MODE if that's convenient). */
3258
3259 static rtx
3260 expand_builtin_memcpy (tree exp, rtx target)
3261 {
3262 if (!validate_arglist (exp,
3263 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3264 return NULL_RTX;
3265 else
3266 {
3267 tree dest = CALL_EXPR_ARG (exp, 0);
3268 tree src = CALL_EXPR_ARG (exp, 1);
3269 tree len = CALL_EXPR_ARG (exp, 2);
3270 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3271 }
3272 }
3273
3274 /* Expand an instrumented call EXP to the memcpy builtin.
3275 Return NULL_RTX if we failed, the caller should emit a normal call,
3276 otherwise try to get the result in TARGET, if convenient (and in
3277 mode MODE if that's convenient). */
3278
3279 static rtx
3280 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3281 {
3282 if (!validate_arglist (exp,
3283 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3284 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3285 INTEGER_TYPE, VOID_TYPE))
3286 return NULL_RTX;
3287 else
3288 {
3289 tree dest = CALL_EXPR_ARG (exp, 0);
3290 tree src = CALL_EXPR_ARG (exp, 2);
3291 tree len = CALL_EXPR_ARG (exp, 4);
3292 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3293
3294 /* Return src bounds with the result. */
3295 if (res)
3296 {
3297 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3298 expand_normal (CALL_EXPR_ARG (exp, 1)));
3299 res = chkp_join_splitted_slot (res, bnd);
3300 }
3301 return res;
3302 }
3303 }
3304
3305 /* Expand a call EXP to the mempcpy builtin.
3306 Return NULL_RTX if we failed; the caller should emit a normal call,
3307 otherwise try to get the result in TARGET, if convenient (and in
3308 mode MODE if that's convenient). If ENDP is 0 return the
3309 destination pointer, if ENDP is 1 return the end pointer ala
3310 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3311 stpcpy. */
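/* Concretely, with DEST = d and LEN = 5: ENDP == 0 yields d,
   ENDP == 1 yields d + 5, and ENDP == 2 yields d + 4 (the byte
   where stpcpy's terminating nul lives).  */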
3312
3313 static rtx
3314 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3315 {
3316 if (!validate_arglist (exp,
3317 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3318 return NULL_RTX;
3319 else
3320 {
3321 tree dest = CALL_EXPR_ARG (exp, 0);
3322 tree src = CALL_EXPR_ARG (exp, 1);
3323 tree len = CALL_EXPR_ARG (exp, 2);
3324 return expand_builtin_mempcpy_args (dest, src, len,
3325 target, mode, /*endp=*/ 1,
3326 exp);
3327 }
3328 }
3329
3330 /* Expand an instrumented call EXP to the mempcpy builtin.
3331 Return NULL_RTX if we failed; the caller should emit a normal call.
3332 Otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3334
3335 static rtx
3336 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3337 {
3338 if (!validate_arglist (exp,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 INTEGER_TYPE, VOID_TYPE))
3342 return NULL_RTX;
3343 else
3344 {
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 2);
3347 tree len = CALL_EXPR_ARG (exp, 4);
3348 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3349 mode, 1, exp);
3350
3351 /* Return src bounds with the result. */
3352 if (res)
3353 {
3354 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3355 expand_normal (CALL_EXPR_ARG (exp, 1)));
3356 res = chkp_join_splitted_slot (res, bnd);
3357 }
3358 return res;
3359 }
3360 }
3361
3362 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3363 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3364 so that this can also be called without constructing an actual CALL_EXPR.
3365 The other arguments and return value are the same as for
3366 expand_builtin_mempcpy. */
3367
3368 static rtx
3369 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3370 rtx target, machine_mode mode, int endp,
3371 tree orig_exp)
3372 {
3373 tree fndecl = get_callee_fndecl (orig_exp);
3374
3375 /* If the return value is ignored, transform mempcpy into memcpy. */
3376 if (target == const0_rtx
3377 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3378 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3379 {
3380 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3381 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3382 dest, src, len);
3383 return expand_expr (result, target, mode, EXPAND_NORMAL);
3384 }
3385 else if (target == const0_rtx
3386 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3387 {
3388 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3389 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3390 dest, src, len);
3391 return expand_expr (result, target, mode, EXPAND_NORMAL);
3392 }
3393 else
3394 {
3395 const char *src_str;
3396 unsigned int src_align = get_pointer_alignment (src);
3397 unsigned int dest_align = get_pointer_alignment (dest);
3398 rtx dest_mem, src_mem, len_rtx;
3399
3400 /* If either SRC or DEST is not a pointer type, don't do this
3401 operation in-line. */
3402 if (dest_align == 0 || src_align == 0)
3403 return NULL_RTX;
3404
3405 /* If LEN is not constant, call the normal function. */
3406 if (! tree_fits_uhwi_p (len))
3407 return NULL_RTX;
3408
3409 len_rtx = expand_normal (len);
3410 src_str = c_getstr (src);
3411
3412 /* If SRC is a string constant and block move would be done
3413 by pieces, we can avoid loading the string from memory
3414 and only store the computed constants. */
3415 if (src_str
3416 && CONST_INT_P (len_rtx)
3417 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3418 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3419 CONST_CAST (char *, src_str),
3420 dest_align, false))
3421 {
3422 dest_mem = get_memory_rtx (dest, len);
3423 set_mem_align (dest_mem, dest_align);
3424 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3425 builtin_memcpy_read_str,
3426 CONST_CAST (char *, src_str),
3427 dest_align, false, endp);
3428 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3429 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3430 return dest_mem;
3431 }
3432
3433 if (CONST_INT_P (len_rtx)
3434 && can_move_by_pieces (INTVAL (len_rtx),
3435 MIN (dest_align, src_align)))
3436 {
3437 dest_mem = get_memory_rtx (dest, len);
3438 set_mem_align (dest_mem, dest_align);
3439 src_mem = get_memory_rtx (src, len);
3440 set_mem_align (src_mem, src_align);
3441 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3442 MIN (dest_align, src_align), endp);
3443 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3444 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3445 return dest_mem;
3446 }
3447
3448 return NULL_RTX;
3449 }
3450 }
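
/* Illustrative sketch (not GCC source): the const0_rtx checks above mean
   that a mempcpy whose result is unused, e.g.

     mempcpy (dst, src, n);       // return value ignored

   is expanded as if the program had written

     memcpy (dst, src, n);

   which avoids materializing the dst + n return value.  */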
3451
3452 #ifndef HAVE_movstr
3453 # define HAVE_movstr 0
3454 # define CODE_FOR_movstr CODE_FOR_nothing
3455 #endif
3456
3457 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3458 we failed; the caller should emit a normal call. Otherwise try to
3459 get the result in TARGET, if convenient. If ENDP is 0 return the
3460 destination pointer, if ENDP is 1 return the end pointer (a la
3461 mempcpy), and if ENDP is 2 return the end pointer minus one (a la
3462 stpcpy). */
3463
3464 static rtx
3465 expand_movstr (tree dest, tree src, rtx target, int endp)
3466 {
3467 struct expand_operand ops[3];
3468 rtx dest_mem;
3469 rtx src_mem;
3470
3471 if (!HAVE_movstr)
3472 return NULL_RTX;
3473
3474 dest_mem = get_memory_rtx (dest, NULL);
3475 src_mem = get_memory_rtx (src, NULL);
3476 if (!endp)
3477 {
3478 target = force_reg (Pmode, XEXP (dest_mem, 0));
3479 dest_mem = replace_equiv_address (dest_mem, target);
3480 }
3481
3482 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3483 create_fixed_operand (&ops[1], dest_mem);
3484 create_fixed_operand (&ops[2], src_mem);
3485 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3486 return NULL_RTX;
3487
3488 if (endp && target != const0_rtx)
3489 {
3490 target = ops[0].value;
3491 /* movstr is supposed to set end to the address of the NUL
3492 terminator. If the caller requested a mempcpy-like return value,
3493 adjust it. */
3494 if (endp == 1)
3495 {
3496 rtx tem = plus_constant (GET_MODE (target),
3497 gen_lowpart (GET_MODE (target), target), 1);
3498 emit_move_insn (target, force_operand (tem, NULL_RTX));
3499 }
3500 }
3501 return target;
3502 }
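
/* Illustrative sketch (not GCC source): movstr leaves the address of the
   NUL terminator, which is exactly the ENDP == 2 (stpcpy) convention, so
   the plus_constant above adds 1 for the ENDP == 1 (mempcpy) case:

     char *end2 = stpcpy (dst, src);    // address of the NUL: ENDP == 2
     char *end1 = end2 + 1;             // one past the NUL:   ENDP == 1  */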
3503
3504 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3505 NULL_RTX if we failed; the caller should emit a normal call.
3506 Otherwise try to get the result in TARGET, if
3507 convenient. */
3508
3509 static rtx
3510 expand_builtin_strcpy (tree exp, rtx target)
3511 {
3512 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3513 {
3514 tree dest = CALL_EXPR_ARG (exp, 0);
3515 tree src = CALL_EXPR_ARG (exp, 1);
3516 return expand_builtin_strcpy_args (dest, src, target);
3517 }
3518 return NULL_RTX;
3519 }
3520
3521 /* Helper function to do the actual work for expand_builtin_strcpy. The
3522 arguments to the builtin_strcpy call DEST and SRC are broken out
3523 so that this can also be called without constructing an actual CALL_EXPR.
3524 The other arguments and return value are the same as for
3525 expand_builtin_strcpy. */
3526
3527 static rtx
3528 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3529 {
3530 return expand_movstr (dest, src, target, /*endp=*/0);
3531 }
3532
3533 /* Expand a call EXP to the stpcpy builtin.
3534 Return NULL_RTX if we failed; the caller should emit a normal call.
3535 Otherwise try to get the result in TARGET, if convenient (and in
3536 mode MODE if that's convenient). */
3537
3538 static rtx
3539 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3540 {
3541 tree dst, src;
3542 location_t loc = EXPR_LOCATION (exp);
3543
3544 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3545 return NULL_RTX;
3546
3547 dst = CALL_EXPR_ARG (exp, 0);
3548 src = CALL_EXPR_ARG (exp, 1);
3549
3550 /* If the return value is ignored, transform stpcpy into strcpy. */
3551 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3552 {
3553 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3554 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3556 }
3557 else
3558 {
3559 tree len, lenp1;
3560 rtx ret;
3561
3562 /* Ensure we get an actual string whose length can be evaluated at
3563 compile-time, not an expression containing a string. This is
3564 because the latter will potentially produce pessimized code
3565 when used to produce the return value. */
3566 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3567 return expand_movstr (dst, src, target, /*endp=*/2);
3568
3569 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3570 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3571 target, mode, /*endp=*/2,
3572 exp);
3573
3574 if (ret)
3575 return ret;
3576
3577 if (TREE_CODE (len) == INTEGER_CST)
3578 {
3579 rtx len_rtx = expand_normal (len);
3580
3581 if (CONST_INT_P (len_rtx))
3582 {
3583 ret = expand_builtin_strcpy_args (dst, src, target);
3584
3585 if (ret)
3586 {
3587 if (! target)
3588 {
3589 if (mode != VOIDmode)
3590 target = gen_reg_rtx (mode);
3591 else
3592 target = gen_reg_rtx (GET_MODE (ret));
3593 }
3594 if (GET_MODE (target) != GET_MODE (ret))
3595 ret = gen_lowpart (GET_MODE (target), ret);
3596
3597 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3598 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3599 gcc_assert (ret);
3600
3601 return target;
3602 }
3603 }
3604 }
3605
3606 return expand_movstr (dst, src, target, /*endp=*/2);
3607 }
3608 }
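
/* Illustrative sketch (not GCC source): with a constant source string the
   path above rewrites stpcpy as a mempcpy of strlen + 1 bytes:

     char buf[8];
     char *p = stpcpy (buf, "abc");   // expanded like
                                      // mempcpy (buf, "abc", 4), with
                                      // ENDP == 2 giving p == buf + 3

   i.e. P points at the copied NUL terminator.  */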
3609
3610 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3611 bytes from constant string DATA + OFFSET and return it as target
3612 constant. */
3613
3614 rtx
3615 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3616 machine_mode mode)
3617 {
3618 const char *str = (const char *) data;
3619
3620 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3621 return const0_rtx;
3622
3623 return c_readstr (str + offset, mode);
3624 }
3625
3626 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3627 NULL_RTX if we failed; the caller should emit a normal call. */
3628
3629 static rtx
3630 expand_builtin_strncpy (tree exp, rtx target)
3631 {
3632 location_t loc = EXPR_LOCATION (exp);
3633
3634 if (validate_arglist (exp,
3635 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636 {
3637 tree dest = CALL_EXPR_ARG (exp, 0);
3638 tree src = CALL_EXPR_ARG (exp, 1);
3639 tree len = CALL_EXPR_ARG (exp, 2);
3640 tree slen = c_strlen (src, 1);
3641
3642 /* We must be passed a constant LEN and a SRC of constant length. */
3643 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3644 return NULL_RTX;
3645
3646 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3647
3648 /* We're required to pad with trailing zeros if the requested
3649 len is greater than strlen(s2)+1. In that case try to
3650 use store_by_pieces; if that fails, punt. */
3651 if (tree_int_cst_lt (slen, len))
3652 {
3653 unsigned int dest_align = get_pointer_alignment (dest);
3654 const char *p = c_getstr (src);
3655 rtx dest_mem;
3656
3657 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3658 || !can_store_by_pieces (tree_to_uhwi (len),
3659 builtin_strncpy_read_str,
3660 CONST_CAST (char *, p),
3661 dest_align, false))
3662 return NULL_RTX;
3663
3664 dest_mem = get_memory_rtx (dest, len);
3665 store_by_pieces (dest_mem, tree_to_uhwi (len),
3666 builtin_strncpy_read_str,
3667 CONST_CAST (char *, p), dest_align, false, 0);
3668 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3669 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3670 return dest_mem;
3671 }
3672 }
3673 return NULL_RTX;
3674 }
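
/* Illustrative sketch (not GCC source): the padding case handled above is

     char buf[8];
     strncpy (buf, "ab", sizeof buf);   // buf becomes "ab\0\0\0\0\0\0"

   where strlen ("ab") + 1 == 3 < 8, so all eight bytes must be written;
   builtin_strncpy_read_str supplies zeros past the end of the string so
   store_by_pieces can emit the whole block from constants.  */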
3675
3676 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3677 bytes from constant string DATA + OFFSET and return it as target
3678 constant. */
3679
3680 rtx
3681 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3682 machine_mode mode)
3683 {
3684 const char *c = (const char *) data;
3685 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3686
3687 memset (p, *c, GET_MODE_SIZE (mode));
3688
3689 return c_readstr (p, mode);
3690 }
3691
3692 /* Callback routine for store_by_pieces. Return the RTL of a register
3693 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3694 char value given in the RTL register data. For example, if mode is
3695 4 bytes wide, return the RTL for 0x01010101*data. */
3696
3697 static rtx
3698 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3699 machine_mode mode)
3700 {
3701 rtx target, coeff;
3702 size_t size;
3703 char *p;
3704
3705 size = GET_MODE_SIZE (mode);
3706 if (size == 1)
3707 return (rtx) data;
3708
3709 p = XALLOCAVEC (char, size);
3710 memset (p, 1, size);
3711 coeff = c_readstr (p, mode);
3712
3713 target = convert_to_mode (mode, (rtx) data, 1);
3714 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3715 return force_reg (mode, target);
3716 }
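
/* Illustrative sketch (not GCC source): for a 4-byte integer mode the
   coefficient built above is 0x01010101, so the multiplication spreads a
   runtime byte across the word, as this C function would:

     #include <stdint.h>

     uint32_t
     spread_byte (uint8_t v)
     {
       return (uint32_t) v * 0x01010101u;   // e.g. 0xAB -> 0xABABABAB
     }

   expand_mult performs the equivalent multiply at the RTL level.  */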
3717
3718 /* Expand expression EXP, which is a call to the memset builtin. Return
3719 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3720 try to get the result in TARGET, if convenient (and in mode MODE if that's
3721 convenient). */
3722
3723 static rtx
3724 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3725 {
3726 if (!validate_arglist (exp,
3727 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3728 return NULL_RTX;
3729 else
3730 {
3731 tree dest = CALL_EXPR_ARG (exp, 0);
3732 tree val = CALL_EXPR_ARG (exp, 1);
3733 tree len = CALL_EXPR_ARG (exp, 2);
3734 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3735 }
3736 }
3737
3738 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3739 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3740 try to get the result in TARGET, if convenient (and in mode MODE if that's
3741 convenient). */
3742
3743 static rtx
3744 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3745 {
3746 if (!validate_arglist (exp,
3747 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3748 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3749 return NULL_RTX;
3750 else
3751 {
3752 tree dest = CALL_EXPR_ARG (exp, 0);
3753 tree val = CALL_EXPR_ARG (exp, 2);
3754 tree len = CALL_EXPR_ARG (exp, 3);
3755 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3756
3757 /* Return src bounds with the result. */
3758 if (res)
3759 {
3760 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3761 expand_normal (CALL_EXPR_ARG (exp, 1)));
3762 res = chkp_join_splitted_slot (res, bnd);
3763 }
3764 return res;
3765 }
3766 }
3767
3768 /* Helper function to do the actual work for expand_builtin_memset. The
3769 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3770 so that this can also be called without constructing an actual CALL_EXPR.
3771 The other arguments and return value are the same as for
3772 expand_builtin_memset. */
3773
3774 static rtx
3775 expand_builtin_memset_args (tree dest, tree val, tree len,
3776 rtx target, machine_mode mode, tree orig_exp)
3777 {
3778 tree fndecl, fn;
3779 enum built_in_function fcode;
3780 machine_mode val_mode;
3781 char c;
3782 unsigned int dest_align;
3783 rtx dest_mem, dest_addr, len_rtx;
3784 HOST_WIDE_INT expected_size = -1;
3785 unsigned int expected_align = 0;
3786 unsigned HOST_WIDE_INT min_size;
3787 unsigned HOST_WIDE_INT max_size;
3788 unsigned HOST_WIDE_INT probable_max_size;
3789
3790 dest_align = get_pointer_alignment (dest);
3791
3792 /* If DEST is not a pointer type, don't do this operation in-line. */
3793 if (dest_align == 0)
3794 return NULL_RTX;
3795
3796 if (currently_expanding_gimple_stmt)
3797 stringop_block_profile (currently_expanding_gimple_stmt,
3798 &expected_align, &expected_size);
3799
3800 if (expected_align < dest_align)
3801 expected_align = dest_align;
3802
3803 /* If the LEN parameter is zero, return DEST. */
3804 if (integer_zerop (len))
3805 {
3806 /* Evaluate and ignore VAL in case it has side-effects. */
3807 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3808 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3809 }
3810
3811 /* Stabilize the arguments in case we fail. */
3812 dest = builtin_save_expr (dest);
3813 val = builtin_save_expr (val);
3814 len = builtin_save_expr (len);
3815
3816 len_rtx = expand_normal (len);
3817 determine_block_size (len, len_rtx, &min_size, &max_size,
3818 &probable_max_size);
3819 dest_mem = get_memory_rtx (dest, len);
3820 val_mode = TYPE_MODE (unsigned_char_type_node);
3821
3822 if (TREE_CODE (val) != INTEGER_CST)
3823 {
3824 rtx val_rtx;
3825
3826 val_rtx = expand_normal (val);
3827 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3828
3829 /* Assume that we can memset by pieces if we can store
3830 the coefficients by pieces (in the required modes).
3831 We can't pass builtin_memset_gen_str as that emits RTL. */
3832 c = 1;
3833 if (tree_fits_uhwi_p (len)
3834 && can_store_by_pieces (tree_to_uhwi (len),
3835 builtin_memset_read_str, &c, dest_align,
3836 true))
3837 {
3838 val_rtx = force_reg (val_mode, val_rtx);
3839 store_by_pieces (dest_mem, tree_to_uhwi (len),
3840 builtin_memset_gen_str, val_rtx, dest_align,
3841 true, 0);
3842 }
3843 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3844 dest_align, expected_align,
3845 expected_size, min_size, max_size,
3846 probable_max_size))
3847 goto do_libcall;
3848
3849 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3850 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3851 return dest_mem;
3852 }
3853
3854 if (target_char_cast (val, &c))
3855 goto do_libcall;
3856
3857 if (c)
3858 {
3859 if (tree_fits_uhwi_p (len)
3860 && can_store_by_pieces (tree_to_uhwi (len),
3861 builtin_memset_read_str, &c, dest_align,
3862 true))
3863 store_by_pieces (dest_mem, tree_to_uhwi (len),
3864 builtin_memset_read_str, &c, dest_align, true, 0);
3865 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3866 gen_int_mode (c, val_mode),
3867 dest_align, expected_align,
3868 expected_size, min_size, max_size,
3869 probable_max_size))
3870 goto do_libcall;
3871
3872 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3873 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3874 return dest_mem;
3875 }
3876
3877 set_mem_align (dest_mem, dest_align);
3878 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3879 CALL_EXPR_TAILCALL (orig_exp)
3880 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3881 expected_align, expected_size,
3882 min_size, max_size,
3883 probable_max_size);
3884
3885 if (dest_addr == 0)
3886 {
3887 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3888 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3889 }
3890
3891 return dest_addr;
3892
3893 do_libcall:
3894 fndecl = get_callee_fndecl (orig_exp);
3895 fcode = DECL_FUNCTION_CODE (fndecl);
3896 if (fcode == BUILT_IN_MEMSET
3897 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3898 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3899 dest, val, len);
3900 else if (fcode == BUILT_IN_BZERO)
3901 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3902 dest, len);
3903 else
3904 gcc_unreachable ();
3905 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3906 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3907 return expand_call (fn, target, target == const0_rtx);
3908 }
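
/* Illustrative sketch (not GCC source): the three inline cases above are

     memset (p, v, n);   // V not constant: probe with a dummy byte, then
                         // store V spread by builtin_memset_gen_str
     memset (p, 1, n);   // V a nonzero constant: store_by_pieces/setmem
     memset (p, 0, n);   // V == 0: clear_storage_hints

   with do_libcall as the common fallback when no inline form applies.  */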
3909
3910 /* Expand expression EXP, which is a call to the bzero builtin. Return
3911 NULL_RTX if we failed; the caller should emit a normal call. */
3912
3913 static rtx
3914 expand_builtin_bzero (tree exp)
3915 {
3916 tree dest, size;
3917 location_t loc = EXPR_LOCATION (exp);
3918
3919 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3920 return NULL_RTX;
3921
3922 dest = CALL_EXPR_ARG (exp, 0);
3923 size = CALL_EXPR_ARG (exp, 1);
3924
3925 /* New argument list transforming bzero(ptr x, int y) to
3926 memset(ptr x, int 0, size_t y). This is done this way
3927 so that if it isn't expanded inline, we fall back to
3928 calling bzero instead of memset. */
3929
3930 return expand_builtin_memset_args (dest, integer_zero_node,
3931 fold_convert_loc (loc,
3932 size_type_node, size),
3933 const0_rtx, VOIDmode, exp);
3934 }
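
/* Illustrative sketch (not GCC source): the rewrite above treats

     bzero (p, n);

   as

     memset (p, 0, (size_t) n);

   but keeps EXP as the original bzero call, so when inline expansion
   fails the libcall path still emits a call to bzero, not memset.  */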
3935
3936 /* Expand expression EXP, which is a call to the memcmp built-in function.
3937 Return NULL_RTX if we failed and the caller should emit a normal call,
3938 otherwise try to get the result in TARGET, if convenient (and in mode
3939 MODE, if that's convenient). */
3940
3941 static rtx
3942 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3943 ATTRIBUTE_UNUSED machine_mode mode)
3944 {
3945 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3946
3947 if (!validate_arglist (exp,
3948 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3949 return NULL_RTX;
3950
3951 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3952 implementing memcmp because it will stop if it encounters two
3953 zero bytes. */
3954 #if defined HAVE_cmpmemsi
3955 {
3956 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3957 rtx result;
3958 rtx insn;
3959 tree arg1 = CALL_EXPR_ARG (exp, 0);
3960 tree arg2 = CALL_EXPR_ARG (exp, 1);
3961 tree len = CALL_EXPR_ARG (exp, 2);
3962
3963 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3964 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3965 machine_mode insn_mode;
3966
3967 if (HAVE_cmpmemsi)
3968 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3969 else
3970 return NULL_RTX;
3971
3972 /* If either argument is not a pointer (its alignment is unknown), call the function. */
3973 if (arg1_align == 0 || arg2_align == 0)
3974 return NULL_RTX;
3975
3976 /* Make a place to write the result of the instruction. */
3977 result = target;
3978 if (! (result != 0
3979 && REG_P (result) && GET_MODE (result) == insn_mode
3980 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3981 result = gen_reg_rtx (insn_mode);
3982
3983 arg1_rtx = get_memory_rtx (arg1, len);
3984 arg2_rtx = get_memory_rtx (arg2, len);
3985 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3986
3987 /* Set MEM_SIZE as appropriate. */
3988 if (CONST_INT_P (arg3_rtx))
3989 {
3990 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3991 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3992 }
3993
3994 if (HAVE_cmpmemsi)
3995 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3996 GEN_INT (MIN (arg1_align, arg2_align)));
3997 else
3998 gcc_unreachable ();
3999
4000 if (insn)
4001 emit_insn (insn);
4002 else
4003 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4004 TYPE_MODE (integer_type_node), 3,
4005 XEXP (arg1_rtx, 0), Pmode,
4006 XEXP (arg2_rtx, 0), Pmode,
4007 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4008 TYPE_UNSIGNED (sizetype)),
4009 TYPE_MODE (sizetype));
4010
4011 /* Return the value in the proper mode for this function. */
4012 mode = TYPE_MODE (TREE_TYPE (exp));
4013 if (GET_MODE (result) == mode)
4014 return result;
4015 else if (target != 0)
4016 {
4017 convert_move (target, result, 0);
4018 return target;
4019 }
4020 else
4021 return convert_to_mode (mode, result, 0);
4022 }
4023 #endif /* HAVE_cmpmemsi. */
4024
4025 return NULL_RTX;
4026 }
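
/* Illustrative sketch (not GCC source): the NUL caveat above matters
   because

     memcmp ("a\0b", "a\0c", 3)   // nonzero: all 3 bytes are compared

   must inspect the bytes after the embedded NULs, whereas a cmpstrn-style
   pattern would stop at the NULs and wrongly report equality.  */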
4027
4028 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4029 if we failed; the caller should emit a normal call. Otherwise try to get
4030 the result in TARGET, if convenient. */
4031
4032 static rtx
4033 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4034 {
4035 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4036 return NULL_RTX;
4037
4038 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4039 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4040 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4041 {
4042 rtx arg1_rtx, arg2_rtx;
4043 rtx result, insn = NULL_RTX;
4044 tree fndecl, fn;
4045 tree arg1 = CALL_EXPR_ARG (exp, 0);
4046 tree arg2 = CALL_EXPR_ARG (exp, 1);
4047
4048 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4049 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4050
4051 /* If either argument is not a pointer (its alignment is unknown), call the function. */
4052 if (arg1_align == 0 || arg2_align == 0)
4053 return NULL_RTX;
4054
4055 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4056 arg1 = builtin_save_expr (arg1);
4057 arg2 = builtin_save_expr (arg2);
4058
4059 arg1_rtx = get_memory_rtx (arg1, NULL);
4060 arg2_rtx = get_memory_rtx (arg2, NULL);
4061
4062 #ifdef HAVE_cmpstrsi
4063 /* Try to call cmpstrsi. */
4064 if (HAVE_cmpstrsi)
4065 {
4066 machine_mode insn_mode
4067 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4068
4069 /* Make a place to write the result of the instruction. */
4070 result = target;
4071 if (! (result != 0
4072 && REG_P (result) && GET_MODE (result) == insn_mode
4073 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4074 result = gen_reg_rtx (insn_mode);
4075
4076 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4077 GEN_INT (MIN (arg1_align, arg2_align)));
4078 }
4079 #endif
4080 #ifdef HAVE_cmpstrnsi
4081 /* Try to determine at least one length and call cmpstrnsi. */
4082 if (!insn && HAVE_cmpstrnsi)
4083 {
4084 tree len;
4085 rtx arg3_rtx;
4086
4087 machine_mode insn_mode
4088 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4089 tree len1 = c_strlen (arg1, 1);
4090 tree len2 = c_strlen (arg2, 1);
4091
4092 if (len1)
4093 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4094 if (len2)
4095 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4096
4097 /* If we don't have a constant length for the first, use the length
4098 of the second, if we know it. We don't require a constant for
4099 this case; some cost analysis could be done if both are available
4100 but neither is constant. For now, assume they're equally cheap,
4101 unless one has side effects. If both strings have constant lengths,
4102 use the smaller. */
4103
4104 if (!len1)
4105 len = len2;
4106 else if (!len2)
4107 len = len1;
4108 else if (TREE_SIDE_EFFECTS (len1))
4109 len = len2;
4110 else if (TREE_SIDE_EFFECTS (len2))
4111 len = len1;
4112 else if (TREE_CODE (len1) != INTEGER_CST)
4113 len = len2;
4114 else if (TREE_CODE (len2) != INTEGER_CST)
4115 len = len1;
4116 else if (tree_int_cst_lt (len1, len2))
4117 len = len1;
4118 else
4119 len = len2;
4120
4121 /* If both arguments have side effects, we cannot optimize. */
4122 if (!len || TREE_SIDE_EFFECTS (len))
4123 goto do_libcall;
4124
4125 arg3_rtx = expand_normal (len);
4126
4127 /* Make a place to write the result of the instruction. */
4128 result = target;
4129 if (! (result != 0
4130 && REG_P (result) && GET_MODE (result) == insn_mode
4131 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4132 result = gen_reg_rtx (insn_mode);
4133
4134 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4135 GEN_INT (MIN (arg1_align, arg2_align)));
4136 }
4137 #endif
4138
4139 if (insn)
4140 {
4141 machine_mode mode;
4142 emit_insn (insn);
4143
4144 /* Return the value in the proper mode for this function. */
4145 mode = TYPE_MODE (TREE_TYPE (exp));
4146 if (GET_MODE (result) == mode)
4147 return result;
4148 if (target == 0)
4149 return convert_to_mode (mode, result, 0);
4150 convert_move (target, result, 0);
4151 return target;
4152 }
4153
4154 /* Expand the library call ourselves using a stabilized argument
4155 list to avoid re-evaluating the function's arguments twice. */
4156 #ifdef HAVE_cmpstrnsi
4157 do_libcall:
4158 #endif
4159 fndecl = get_callee_fndecl (exp);
4160 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4161 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4162 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4163 return expand_call (fn, target, target == const0_rtx);
4164 }
4165 #endif
4166 return NULL_RTX;
4167 }
4168
4169 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4170 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4171 try to get the result in TARGET, if convenient. */
4172
4173 static rtx
4174 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4175 ATTRIBUTE_UNUSED machine_mode mode)
4176 {
4177 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4178
4179 if (!validate_arglist (exp,
4180 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4181 return NULL_RTX;
4182
4183 /* If c_strlen can determine an expression for one of the string
4184 lengths, and it doesn't have side effects, then emit cmpstrnsi
4185 using length MIN(strlen(string)+1, arg3). */
4186 #ifdef HAVE_cmpstrnsi
4187 if (HAVE_cmpstrnsi)
4188 {
4189 tree len, len1, len2;
4190 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4191 rtx result, insn;
4192 tree fndecl, fn;
4193 tree arg1 = CALL_EXPR_ARG (exp, 0);
4194 tree arg2 = CALL_EXPR_ARG (exp, 1);
4195 tree arg3 = CALL_EXPR_ARG (exp, 2);
4196
4197 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4198 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4199 machine_mode insn_mode
4200 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4201
4202 len1 = c_strlen (arg1, 1);
4203 len2 = c_strlen (arg2, 1);
4204
4205 if (len1)
4206 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4207 if (len2)
4208 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4209
4210 /* If we don't have a constant length for the first, use the length
4211 of the second, if we know it. We don't require a constant for
4212 this case; some cost analysis could be done if both are available
4213 but neither is constant. For now, assume they're equally cheap,
4214 unless one has side effects. If both strings have constant lengths,
4215 use the smaller. */
4216
4217 if (!len1)
4218 len = len2;
4219 else if (!len2)
4220 len = len1;
4221 else if (TREE_SIDE_EFFECTS (len1))
4222 len = len2;
4223 else if (TREE_SIDE_EFFECTS (len2))
4224 len = len1;
4225 else if (TREE_CODE (len1) != INTEGER_CST)
4226 len = len2;
4227 else if (TREE_CODE (len2) != INTEGER_CST)
4228 len = len1;
4229 else if (tree_int_cst_lt (len1, len2))
4230 len = len1;
4231 else
4232 len = len2;
4233
4234 /* If both arguments have side effects, we cannot optimize. */
4235 if (!len || TREE_SIDE_EFFECTS (len))
4236 return NULL_RTX;
4237
4238 /* The actual new length parameter is MIN(len,arg3). */
4239 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4240 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4241
4242 /* If either argument is not a pointer (its alignment is unknown), call the function. */
4243 if (arg1_align == 0 || arg2_align == 0)
4244 return NULL_RTX;
4245
4246 /* Make a place to write the result of the instruction. */
4247 result = target;
4248 if (! (result != 0
4249 && REG_P (result) && GET_MODE (result) == insn_mode
4250 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4251 result = gen_reg_rtx (insn_mode);
4252
4253 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4254 arg1 = builtin_save_expr (arg1);
4255 arg2 = builtin_save_expr (arg2);
4256 len = builtin_save_expr (len);
4257
4258 arg1_rtx = get_memory_rtx (arg1, len);
4259 arg2_rtx = get_memory_rtx (arg2, len);
4260 arg3_rtx = expand_normal (len);
4261 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4262 GEN_INT (MIN (arg1_align, arg2_align)));
4263 if (insn)
4264 {
4265 emit_insn (insn);
4266
4267 /* Return the value in the proper mode for this function. */
4268 mode = TYPE_MODE (TREE_TYPE (exp));
4269 if (GET_MODE (result) == mode)
4270 return result;
4271 if (target == 0)
4272 return convert_to_mode (mode, result, 0);
4273 convert_move (target, result, 0);
4274 return target;
4275 }
4276
4277 /* Expand the library call ourselves using a stabilized argument
4278 list to avoid re-evaluating the function's arguments twice. */
4279 fndecl = get_callee_fndecl (exp);
4280 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4281 arg1, arg2, len);
4282 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4283 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4284 return expand_call (fn, target, target == const0_rtx);
4285 }
4286 #endif
4287 return NULL_RTX;
4288 }
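
/* Illustrative sketch (not GCC source): the MIN computation above bounds
   the comparison length with a known constant string length, e.g.

     strncmp (s, "ab", 16)   // compares at most MIN (3, 16) == 3 bytes

   since the comparison can never look past the NUL that terminates the
   constant string.  */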
4289
4290 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4291 if that's convenient. */
4292
4293 rtx
4294 expand_builtin_saveregs (void)
4295 {
4296 rtx val;
4297 rtx_insn *seq;
4298
4299 /* Don't do __builtin_saveregs more than once in a function.
4300 Save the result of the first call and reuse it. */
4301 if (saveregs_value != 0)
4302 return saveregs_value;
4303
4304 /* When this function is called, it means that registers must be
4305 saved on entry to this function. So we migrate the call to the
4306 first insn of this function. */
4307
4308 start_sequence ();
4309
4310 /* Do whatever the machine needs done in this case. */
4311 val = targetm.calls.expand_builtin_saveregs ();
4312
4313 seq = get_insns ();
4314 end_sequence ();
4315
4316 saveregs_value = val;
4317
4318 /* Put the insns after the NOTE that starts the function. If this
4319 is inside a start_sequence, make the outer-level insn chain current, so
4320 the code is placed at the start of the function. */
4321 push_topmost_sequence ();
4322 emit_insn_after (seq, entry_of_function ());
4323 pop_topmost_sequence ();
4324
4325 return val;
4326 }
4327
4328 /* Expand a call to __builtin_next_arg. */
4329
4330 static rtx
4331 expand_builtin_next_arg (void)
4332 {
4333 /* Checking arguments is already done in fold_builtin_next_arg
4334 that must be called before this function. */
4335 return expand_binop (ptr_mode, add_optab,
4336 crtl->args.internal_arg_pointer,
4337 crtl->args.arg_offset_rtx,
4338 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4339 }
4340
4341 /* Make it easier for the backends by protecting the valist argument
4342 from multiple evaluations. */
4343
4344 static tree
4345 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4346 {
4347 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4348
4349 /* The current way of determining the type of valist is completely
4350 bogus. We should have the information on the va builtin instead. */
4351 if (!vatype)
4352 vatype = targetm.fn_abi_va_list (cfun->decl);
4353
4354 if (TREE_CODE (vatype) == ARRAY_TYPE)
4355 {
4356 if (TREE_SIDE_EFFECTS (valist))
4357 valist = save_expr (valist);
4358
4359 /* For this case, the backends will be expecting a pointer to
4360 vatype, but it's possible we've actually been given an array
4361 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4362 So fix it. */
4363 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4364 {
4365 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4366 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4367 }
4368 }
4369 else
4370 {
4371 tree pt = build_pointer_type (vatype);
4372
4373 if (! needs_lvalue)
4374 {
4375 if (! TREE_SIDE_EFFECTS (valist))
4376 return valist;
4377
4378 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4379 TREE_SIDE_EFFECTS (valist) = 1;
4380 }
4381
4382 if (TREE_SIDE_EFFECTS (valist))
4383 valist = save_expr (valist);
4384 valist = fold_build2_loc (loc, MEM_REF,
4385 vatype, valist, build_int_cst (pt, 0));
4386 }
4387
4388 return valist;
4389 }
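
/* Illustrative sketch (not GCC source): the array fix-up above covers
   targets whose va_list is an array type, where the same object may
   appear either as the array or as its decayed pointer:

     struct tag_example { int gp_offset; };        // hypothetical layout
     typedef struct tag_example va_list_example[1];

     va_list_example ap;              // array-typed: ADDR_EXPR taken here
     struct tag_example *p = ap;      // decayed form: already a pointer

   so the backends always receive a pointer to the va_list contents.  */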
4390
4391 /* The "standard" definition of va_list is void*. */
4392
4393 tree
4394 std_build_builtin_va_list (void)
4395 {
4396 return ptr_type_node;
4397 }
4398
4399 /* The "standard" abi va_list is va_list_type_node. */
4400
4401 tree
4402 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4403 {
4404 return va_list_type_node;
4405 }
4406
4407 /* The "standard" type of va_list is va_list_type_node. */
4408
4409 tree
4410 std_canonical_va_list_type (tree type)
4411 {
4412 tree wtype, htype;
4413
4414 if (INDIRECT_REF_P (type))
4415 type = TREE_TYPE (type);
4416 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4417 type = TREE_TYPE (type);
4418 wtype = va_list_type_node;
4419 htype = type;
4420 /* Treat structure va_list types. */
4421 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4422 htype = TREE_TYPE (htype);
4423 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4424 {
4425 /* If va_list is an array type, the argument may have decayed
4426 to a pointer type, e.g. by being passed to another function.
4427 In that case, unwrap both types so that we can compare the
4428 underlying records. */
4429 if (TREE_CODE (htype) == ARRAY_TYPE
4430 || POINTER_TYPE_P (htype))
4431 {
4432 wtype = TREE_TYPE (wtype);
4433 htype = TREE_TYPE (htype);
4434 }
4435 }
4436 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4437 return va_list_type_node;
4438
4439 return NULL_TREE;
4440 }
4441
4442 /* The "standard" implementation of va_start: just assign `nextarg' to
4443 the variable. */
4444
4445 void
4446 std_expand_builtin_va_start (tree valist, rtx nextarg)
4447 {
4448 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4449 convert_move (va_r, nextarg, 0);
4450
4451 /* We do not have any valid bounds for the pointer, so
4452 just store zero bounds for it. */
4453 if (chkp_function_instrumented_p (current_function_decl))
4454 chkp_expand_bounds_reset_for_mem (valist,
4455 make_tree (TREE_TYPE (valist),
4456 nextarg));
4457 }
4458
4459 /* Expand EXP, a call to __builtin_va_start. */
4460
4461 static rtx
4462 expand_builtin_va_start (tree exp)
4463 {
4464 rtx nextarg;
4465 tree valist;
4466 location_t loc = EXPR_LOCATION (exp);
4467
4468 if (call_expr_nargs (exp) < 2)
4469 {
4470 error_at (loc, "too few arguments to function %<va_start%>");
4471 return const0_rtx;
4472 }
4473
4474 if (fold_builtin_next_arg (exp, true))
4475 return const0_rtx;
4476
4477 nextarg = expand_builtin_next_arg ();
4478 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4479
4480 if (targetm.expand_builtin_va_start)
4481 targetm.expand_builtin_va_start (valist, nextarg);
4482 else
4483 std_expand_builtin_va_start (valist, nextarg);
4484
4485 return const0_rtx;
4486 }
4487
4488 /* Expand EXP, a call to __builtin_va_end. */
4489
4490 static rtx
4491 expand_builtin_va_end (tree exp)
4492 {
4493 tree valist = CALL_EXPR_ARG (exp, 0);
4494
4495 /* Evaluate for side effects, if needed. I hate macros that don't
4496 do that. */
4497 if (TREE_SIDE_EFFECTS (valist))
4498 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499
4500 return const0_rtx;
4501 }
4502
4503 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4504 builtin rather than just as an assignment in stdarg.h because of the
4505 nastiness of array-type va_list types. */
4506
4507 static rtx
4508 expand_builtin_va_copy (tree exp)
4509 {
4510 tree dst, src, t;
4511 location_t loc = EXPR_LOCATION (exp);
4512
4513 dst = CALL_EXPR_ARG (exp, 0);
4514 src = CALL_EXPR_ARG (exp, 1);
4515
4516 dst = stabilize_va_list_loc (loc, dst, 1);
4517 src = stabilize_va_list_loc (loc, src, 0);
4518
4519 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4520
4521 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4522 {
4523 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4524 TREE_SIDE_EFFECTS (t) = 1;
4525 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4526 }
4527 else
4528 {
4529 rtx dstb, srcb, size;
4530
4531 /* Evaluate to pointers. */
4532 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4533 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4534 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4535 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4536
4537 dstb = convert_memory_address (Pmode, dstb);
4538 srcb = convert_memory_address (Pmode, srcb);
4539
4540 /* "Dereference" to BLKmode memories. */
4541 dstb = gen_rtx_MEM (BLKmode, dstb);
4542 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4543 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4544 srcb = gen_rtx_MEM (BLKmode, srcb);
4545 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4546 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4547
4548 /* Copy. */
4549 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4550 }
4551
4552 return const0_rtx;
4553 }
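
/* Illustrative sketch (not GCC source): the two branches above correspond
   to the two va_list styles:

     dst = src;                        // pointer-style va_list: assignment
     memcpy (dst, src, sizeof (dst));  // array-style va_list: block copy

   which is why va_copy must be a builtin rather than a plain stdarg.h
   assignment macro.  */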
4554
4555 /* Expand a call to one of the builtin functions __builtin_frame_address or
4556 __builtin_return_address. */
4557
4558 static rtx
4559 expand_builtin_frame_address (tree fndecl, tree exp)
4560 {
4561 /* The argument must be a nonnegative integer constant.
4562 It counts the number of frames to scan up the stack.
4563 The value is the return address saved in that frame. */
4564 if (call_expr_nargs (exp) == 0)
4565 /* Warning about missing arg was already issued. */
4566 return const0_rtx;
4567 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4568 {
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 error ("invalid argument to %<__builtin_frame_address%>");
4571 else
4572 error ("invalid argument to %<__builtin_return_address%>");
4573 return const0_rtx;
4574 }
4575 else
4576 {
4577 rtx tem
4578 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4579 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4580
4581 /* Some ports cannot access arbitrary stack frames. */
4582 if (tem == NULL)
4583 {
4584 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4585 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4586 else
4587 warning (0, "unsupported argument to %<__builtin_return_address%>");
4588 return const0_rtx;
4589 }
4590
4591 /* For __builtin_frame_address, return what we've got. */
4592 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4593 return tem;
4594
4595 if (!REG_P (tem)
4596 && ! CONSTANT_P (tem))
4597 tem = copy_addr_to_reg (tem);
4598 return tem;
4599 }
4600 }
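
/* Illustrative sketch (not GCC source): the argument counts frames, so

     void *ra = __builtin_return_address (0);   // this function's caller
     void *fp = __builtin_frame_address (0);    // this function's frame

   A nonzero count walks up the stack and, as the code above warns, may
   be unsupported on some ports; non-constant arguments are errors.  */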
4601
4602 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4603 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4604 is the same as for allocate_dynamic_stack_space. */
4605
4606 static rtx
4607 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4608 {
4609 rtx op0;
4610 rtx result;
4611 bool valid_arglist;
4612 unsigned int align;
4613 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4614 == BUILT_IN_ALLOCA_WITH_ALIGN);
4615
4616 valid_arglist
4617 = (alloca_with_align
4618 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4619 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4620
4621 if (!valid_arglist)
4622 return NULL_RTX;
4623
4624 /* Compute the argument. */
4625 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4626
4627 /* Compute the alignment. */
4628 align = (alloca_with_align
4629 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4630 : BIGGEST_ALIGNMENT);
4631
4632 /* Allocate the desired space. */
4633 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4634 result = convert_memory_address (ptr_mode, result);
4635
4636 return result;
4637 }
4638
4639 /* Expand a call to bswap builtin in EXP.
4640 Return NULL_RTX if a normal call should be emitted rather than expanding the
4641 function in-line. If convenient, the result should be placed in TARGET.
4642 SUBTARGET may be used as the target for computing one of EXP's operands. */
4643
4644 static rtx
4645 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4646 rtx subtarget)
4647 {
4648 tree arg;
4649 rtx op0;
4650
4651 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4652 return NULL_RTX;
4653
4654 arg = CALL_EXPR_ARG (exp, 0);
4655 op0 = expand_expr (arg,
4656 subtarget && GET_MODE (subtarget) == target_mode
4657 ? subtarget : NULL_RTX,
4658 target_mode, EXPAND_NORMAL);
4659 if (GET_MODE (op0) != target_mode)
4660 op0 = convert_to_mode (target_mode, op0, 1);
4661
4662 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4663
4664 gcc_assert (target);
4665
4666 return convert_to_mode (target_mode, target, 1);
4667 }
4668
4669 /* Expand a call to a unary builtin in EXP.
4670 Return NULL_RTX if a normal call should be emitted rather than expanding the
4671 function in-line. If convenient, the result should be placed in TARGET.
4672 SUBTARGET may be used as the target for computing one of EXP's operands. */
4673
4674 static rtx
4675 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4676 rtx subtarget, optab op_optab)
4677 {
4678 rtx op0;
4679
4680 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4681 return NULL_RTX;
4682
4683 /* Compute the argument. */
4684 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4685 (subtarget
4686 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4687 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4688 VOIDmode, EXPAND_NORMAL);
4689 /* Compute op, into TARGET if possible.
4690 Set TARGET to wherever the result comes back. */
4691 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4692 op_optab, op0, target, op_optab != clrsb_optab);
4693 gcc_assert (target);
4694
4695 return convert_to_mode (target_mode, target, 0);
4696 }
4697
4698 /* Expand a call to __builtin_expect. We just return our argument
4699 as the builtin_expect semantics should already have been applied by
4700 the tree branch prediction pass. */
4701
4702 static rtx
4703 expand_builtin_expect (tree exp, rtx target)
4704 {
4705 tree arg;
4706
4707 if (call_expr_nargs (exp) < 2)
4708 return const0_rtx;
4709 arg = CALL_EXPR_ARG (exp, 0);
4710
4711 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4712 /* When guessing was done, the hints should be already stripped away. */
4713 gcc_assert (!flag_guess_branch_prob
4714 || optimize == 0 || seen_error ());
4715 return target;
4716 }
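
/* Illustrative sketch (not GCC source): a typical use is

     if (__builtin_expect (err != 0, 0))   // "ERR != 0 is unlikely"
       handle_error ();                    // handle_error is hypothetical

   The hint was consumed by the branch prediction pass, so all that is
   left to expand here is the first argument itself.  */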
4717
4718 /* Expand a call to __builtin_assume_aligned. We just return our first
4719 argument, as the builtin_assume_aligned semantics should already have
4720 been applied by CCP. */
4721
4722 static rtx
4723 expand_builtin_assume_aligned (tree exp, rtx target)
4724 {
4725 if (call_expr_nargs (exp) < 2)
4726 return const0_rtx;
4727 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4728 EXPAND_NORMAL);
4729 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4730 && (call_expr_nargs (exp) < 3
4731 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4732 return target;
4733 }
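
/* Illustrative sketch (not GCC source): a typical use is

     double *q = __builtin_assume_aligned (p, 16);   // q may be assumed
                                                     // 16-byte aligned

   CCP has already propagated the alignment, so the expansion returns the
   first argument; the assert above only checks that the alignment (and
   misalignment) arguments are side-effect free.  */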
4734
4735 void
4736 expand_builtin_trap (void)
4737 {
4738 #ifdef HAVE_trap
4739 if (HAVE_trap)
4740 {
4741 rtx insn = emit_insn (gen_trap ());
4742 /* For trap insns, when not accumulating outgoing args, force a
4743 REG_ARGS_SIZE note to prevent crossjumping of calls with
4744 different args sizes. */
4745 if (!ACCUMULATE_OUTGOING_ARGS)
4746 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4747 }
4748 else
4749 #endif
4750 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4751 emit_barrier ();
4752 }
4753
4754 /* Expand a call to __builtin_unreachable. We do nothing except emit
4755 a barrier saying that control flow will not pass here.
4756
4757 It is the responsibility of the program being compiled to ensure
4758 that control flow never reaches __builtin_unreachable. */
4759 static void
4760 expand_builtin_unreachable (void)
4761 {
4762 emit_barrier ();
4763 }
4764
4765 /* Expand EXP, a call to fabs, fabsf or fabsl.
4766 Return NULL_RTX if a normal call should be emitted rather than expanding
4767 the function inline. If convenient, the result should be placed
4768 in TARGET. SUBTARGET may be used as the target for computing
4769 the operand. */
4770
4771 static rtx
4772 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4773 {
4774 machine_mode mode;
4775 tree arg;
4776 rtx op0;
4777
4778 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4779 return NULL_RTX;
4780
4781 arg = CALL_EXPR_ARG (exp, 0);
4782 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4783 mode = TYPE_MODE (TREE_TYPE (arg));
4784 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4785 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4786 }
4787
4788 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4789 Return NULL_RTX if a normal call should be emitted rather than expanding the
4790 function inline. If convenient, the result should be placed in TARGET.
4791 SUBTARGET may be used as the target for computing the operand. */
4792
4793 static rtx
4794 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4795 {
4796 rtx op0, op1;
4797 tree arg;
4798
4799 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4800 return NULL_RTX;
4801
4802 arg = CALL_EXPR_ARG (exp, 0);
4803 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4804
4805 arg = CALL_EXPR_ARG (exp, 1);
4806 op1 = expand_normal (arg);
4807
4808 return expand_copysign (op0, op1, target);
4809 }
4810
4811 /* Expand a call to __builtin___clear_cache. */
4812
4813 static rtx
4814 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4815 {
4816 #ifndef HAVE_clear_cache
4817 #ifdef CLEAR_INSN_CACHE
4818 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4819 does something. Just do the default expansion to a call to
4820 __clear_cache(). */
4821 return NULL_RTX;
4822 #else
4823 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4824 does nothing. There is no need to call it. Do nothing. */
4825 return const0_rtx;
4826 #endif /* CLEAR_INSN_CACHE */
4827 #else
4828 /* We have a "clear_cache" insn, and it will handle everything. */
4829 tree begin, end;
4830 rtx begin_rtx, end_rtx;
4831
4832 /* We must not expand to a library call. If we did, any
4833 fallback library function in libgcc that might contain a call to
4834 __builtin___clear_cache() would recurse infinitely. */
4835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4836 {
4837 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4838 return const0_rtx;
4839 }
4840
4841 if (HAVE_clear_cache)
4842 {
4843 struct expand_operand ops[2];
4844
4845 begin = CALL_EXPR_ARG (exp, 0);
4846 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4847
4848 end = CALL_EXPR_ARG (exp, 1);
4849 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4850
4851 create_address_operand (&ops[0], begin_rtx);
4852 create_address_operand (&ops[1], end_rtx);
4853 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4854 return const0_rtx;
4855 }
4856 return const0_rtx;
4857 #endif /* HAVE_clear_cache */
4858 }
4859
4860 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4861
4862 static rtx
4863 round_trampoline_addr (rtx tramp)
4864 {
4865 rtx temp, addend, mask;
4866
4867 /* If we don't need too much alignment, we'll have been guaranteed
4868 proper alignment by get_trampoline_type. */
4869 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4870 return tramp;
4871
4872 /* Round address up to desired boundary. */
4873 temp = gen_reg_rtx (Pmode);
4874 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4875 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4876
4877 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4878 temp, 0, OPTAB_LIB_WIDEN);
4879 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4880 temp, 0, OPTAB_LIB_WIDEN);
4881
4882 return tramp;
4883 }
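
/* Illustrative sketch (not GCC source): the PLUS and AND above implement
   the usual align-up formula, here for a power-of-two byte alignment A:

     #include <stdint.h>

     uintptr_t
     align_up (uintptr_t addr, uintptr_t a)
     {
       return (addr + a - 1) & -a;   // round ADDR up to a multiple of A
     }

   with A == TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT in the code above.  */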
4884
4885 static rtx
4886 expand_builtin_init_trampoline (tree exp, bool onstack)
4887 {
4888 tree t_tramp, t_func, t_chain;
4889 rtx m_tramp, r_tramp, r_chain, tmp;
4890
4891 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4892 POINTER_TYPE, VOID_TYPE))
4893 return NULL_RTX;
4894
4895 t_tramp = CALL_EXPR_ARG (exp, 0);
4896 t_func = CALL_EXPR_ARG (exp, 1);
4897 t_chain = CALL_EXPR_ARG (exp, 2);
4898
4899 r_tramp = expand_normal (t_tramp);
4900 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4901 MEM_NOTRAP_P (m_tramp) = 1;
4902
4903 /* If ONSTACK, the TRAMP argument should be the address of a field
4904 within the local function's FRAME decl. Either way, let's see if
4905 we can fill in the MEM_ATTRs for this memory. */
4906 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4907 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4908
4909 /* Creator of a heap trampoline is responsible for making sure the
4910 address is aligned to at least STACK_BOUNDARY. Normally malloc
4911 will ensure this anyhow. */
4912 tmp = round_trampoline_addr (r_tramp);
4913 if (tmp != r_tramp)
4914 {
4915 m_tramp = change_address (m_tramp, BLKmode, tmp);
4916 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4917 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4918 }
4919
4920 /* The FUNC argument should be the address of the nested function.
4921 Extract the actual function decl to pass to the hook. */
4922 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4923 t_func = TREE_OPERAND (t_func, 0);
4924 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4925
4926 r_chain = expand_normal (t_chain);
4927
4928 /* Generate insns to initialize the trampoline. */
4929 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4930
4931 if (onstack)
4932 {
4933 trampolines_created = 1;
4934
4935 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4936 "trampoline generated for nested function %qD", t_func);
4937 }
4938
4939 return const0_rtx;
4940 }
4941
4942 static rtx
4943 expand_builtin_adjust_trampoline (tree exp)
4944 {
4945 rtx tramp;
4946
4947 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4948 return NULL_RTX;
4949
4950 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4951 tramp = round_trampoline_addr (tramp);
4952 if (targetm.calls.trampoline_adjust_address)
4953 tramp = targetm.calls.trampoline_adjust_address (tramp);
4954
4955 return tramp;
4956 }
4957
4958 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4959 function. The function first checks whether the back end provides
4960 an insn to implement signbit for the respective mode. If not, it
4961 checks whether the floating point format of the value is such that
4962 the sign bit can be extracted. If that is not the case, the
4963 function returns NULL_RTX to indicate that a normal call should be
4964 emitted rather than expanding the function in-line. EXP is the
4965 expression that is a call to the builtin function; if convenient,
4966 the result should be placed in TARGET. */
4967 static rtx
4968 expand_builtin_signbit (tree exp, rtx target)
4969 {
4970 const struct real_format *fmt;
4971 machine_mode fmode, imode, rmode;
4972 tree arg;
4973 int word, bitpos;
4974 enum insn_code icode;
4975 rtx temp;
4976 location_t loc = EXPR_LOCATION (exp);
4977
4978 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4979 return NULL_RTX;
4980
4981 arg = CALL_EXPR_ARG (exp, 0);
4982 fmode = TYPE_MODE (TREE_TYPE (arg));
4983 rmode = TYPE_MODE (TREE_TYPE (exp));
4984 fmt = REAL_MODE_FORMAT (fmode);
4985
4986 arg = builtin_save_expr (arg);
4987
4988 /* Expand the argument yielding a RTX expression. */
4989 temp = expand_normal (arg);
4990
4991 /* Check if the back end provides an insn that handles signbit for the
4992 argument's mode. */
4993 icode = optab_handler (signbit_optab, fmode);
4994 if (icode != CODE_FOR_nothing)
4995 {
4996 rtx_insn *last = get_last_insn ();
4997 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4998 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4999 return target;
5000 delete_insns_since (last);
5001 }
5002
5003 /* For floating point formats without a sign bit, implement signbit
5004 as "ARG < 0.0". */
5005 bitpos = fmt->signbit_ro;
5006 if (bitpos < 0)
5007 {
5008 /* But we can't do this if the format supports signed zero. */
5009 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5010 return NULL_RTX;
5011
5012 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5013 build_real (TREE_TYPE (arg), dconst0));
5014 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5015 }
5016
5017 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5018 {
5019 imode = int_mode_for_mode (fmode);
5020 if (imode == BLKmode)
5021 return NULL_RTX;
5022 temp = gen_lowpart (imode, temp);
5023 }
5024 else
5025 {
5026 imode = word_mode;
5027 /* Handle targets with different FP word orders. */
5028 if (FLOAT_WORDS_BIG_ENDIAN)
5029 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5030 else
5031 word = bitpos / BITS_PER_WORD;
5032 temp = operand_subword_force (temp, word, fmode);
5033 bitpos = bitpos % BITS_PER_WORD;
5034 }
5035
5036 /* Force the intermediate word_mode (or narrower) result into a
5037 register. This avoids attempting to create paradoxical SUBREGs
5038 of floating point modes below. */
5039 temp = force_reg (imode, temp);
5040
5041 /* If the bitpos is within the "result mode" lowpart, the operation
5042 can be implemented with a single bitwise AND. Otherwise, we need
5043 a right shift and an AND. */
5044
5045 if (bitpos < GET_MODE_BITSIZE (rmode))
5046 {
5047 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5048
5049 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5050 temp = gen_lowpart (rmode, temp);
5051 temp = expand_binop (rmode, and_optab, temp,
5052 immed_wide_int_const (mask, rmode),
5053 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5054 }
5055 else
5056 {
5057 /* Perform a logical right shift to place the signbit in the least
5058 significant bit, then truncate the result to the desired mode
5059 and mask just this bit. */
5060 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5061 temp = gen_lowpart (rmode, temp);
5062 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5063 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5064 }
5065
5066 return temp;
5067 }
5068
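/* Editorial sketch (not part of GCC): at the source level, the masking
   performed above corresponds to extracting the top bit of the value's
   representation.  For IEEE double on a target with 64-bit integers,
   the hypothetical helper below is equivalent.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
example_signbit (double x)
{
  uint64_t bits;
  memcpy (&bits, &x, sizeof bits);  /* Reinterpret without aliasing issues.  */
  return (bits >> 63) & 1;          /* The sign occupies the top bit.  */
}
#endif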
5069 /* Expand fork or exec calls.  TARGET is the desired target of the
5070    call.  EXP is the call.  FN is the declaration of the actual
5071    builtin function being expanded.  IGNORE is nonzero if the
5072    value is to be ignored.  */
5073
5074 static rtx
5075 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5076 {
5077 tree id, decl;
5078 tree call;
5079
5080 /* If we are not profiling, just call the function. */
5081 if (!profile_arc_flag)
5082 return NULL_RTX;
5083
5084   /* Otherwise call the wrapper.  To the rest of the compiler this is
5085      equivalent to the original call, so the generated code does not
5086      diverge, and the wrapper may run whatever is needed to keep the
       profiling sane.  */
5087
5088 switch (DECL_FUNCTION_CODE (fn))
5089 {
5090 case BUILT_IN_FORK:
5091 id = get_identifier ("__gcov_fork");
5092 break;
5093
5094 case BUILT_IN_EXECL:
5095 id = get_identifier ("__gcov_execl");
5096 break;
5097
5098 case BUILT_IN_EXECV:
5099 id = get_identifier ("__gcov_execv");
5100 break;
5101
5102 case BUILT_IN_EXECLP:
5103 id = get_identifier ("__gcov_execlp");
5104 break;
5105
5106 case BUILT_IN_EXECLE:
5107 id = get_identifier ("__gcov_execle");
5108 break;
5109
5110 case BUILT_IN_EXECVP:
5111 id = get_identifier ("__gcov_execvp");
5112 break;
5113
5114 case BUILT_IN_EXECVE:
5115 id = get_identifier ("__gcov_execve");
5116 break;
5117
5118 default:
5119 gcc_unreachable ();
5120 }
5121
5122 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5123 FUNCTION_DECL, id, TREE_TYPE (fn));
5124 DECL_EXTERNAL (decl) = 1;
5125 TREE_PUBLIC (decl) = 1;
5126 DECL_ARTIFICIAL (decl) = 1;
5127 TREE_NOTHROW (decl) = 1;
5128 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5129 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5130 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5131 return expand_call (call, target, ignore);
5132 }
5133
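/* Editorial sketch: under -fprofile-arcs a source-level call to fork ()
   is redirected to __gcov_fork so libgcov can keep the counters sane
   across the address-space split.  A minimal wrapper, assuming the
   libgcov helper __gcov_flush exists, might look like this: */
#if 0
#include <unistd.h>

extern void __gcov_flush (void);  /* libgcov internal (assumption).  */

pid_t
__gcov_fork (void)
{
  __gcov_flush ();  /* Write out and reset counters before forking.  */
  return fork ();
}
#endif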
5134
5135 \f
5136 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5137 the pointer in these functions is void*, the tree optimizers may remove
5138 casts. The mode computed in expand_builtin isn't reliable either, due
5139 to __sync_bool_compare_and_swap.
5140
5141 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5142 group of builtins. This gives us log2 of the mode size. */
5143
5144 static inline machine_mode
5145 get_builtin_sync_mode (int fcode_diff)
5146 {
5147 /* The size is not negotiable, so ask not to get BLKmode in return
5148 if the target indicates that a smaller size would be better. */
5149 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5150 }
5151
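/* Editorial example: a 32-bit __sync_fetch_and_add resolves to the _4
   variant, so FCODE_DIFF is 2 and the mode has 8 << 2 == 32 bits.  */
#if 0
#include <stdint.h>

static uint32_t
example_fetch_add_32 (uint32_t *p)
{
  /* BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
     hence get_builtin_sync_mode picks the 32-bit integer mode.  */
  return __sync_fetch_and_add (p, 1u);
}
#endif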
5152 /* Expand the memory expression LOC and return the appropriate memory operand
5153 for the builtin_sync operations. */
5154
5155 static rtx
5156 get_builtin_sync_mem (tree loc, machine_mode mode)
5157 {
5158 rtx addr, mem;
5159
5160 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5161 addr = convert_memory_address (Pmode, addr);
5162
5163 /* Note that we explicitly do not want any alias information for this
5164 memory, so that we kill all other live memories. Otherwise we don't
5165 satisfy the full barrier semantics of the intrinsic. */
5166 mem = validize_mem (gen_rtx_MEM (mode, addr));
5167
5168   /* The alignment needs to be at least that of the mode.  */
5169 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5170 get_pointer_alignment (loc)));
5171 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5172 MEM_VOLATILE_P (mem) = 1;
5173
5174 return mem;
5175 }
5176
5177 /* Make sure an argument is in the right mode.
5178 EXP is the tree argument.
5179 MODE is the mode it should be in. */
5180
5181 static rtx
5182 expand_expr_force_mode (tree exp, machine_mode mode)
5183 {
5184 rtx val;
5185 machine_mode old_mode;
5186
5187 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5188 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5189 of CONST_INTs, where we know the old_mode only from the call argument. */
5190
5191 old_mode = GET_MODE (val);
5192 if (old_mode == VOIDmode)
5193 old_mode = TYPE_MODE (TREE_TYPE (exp));
5194 val = convert_modes (mode, old_mode, val, 1);
5195 return val;
5196 }
5197
5198
5199 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5200 EXP is the CALL_EXPR. CODE is the rtx code
5201 that corresponds to the arithmetic or logical operation from the name;
5202 an exception here is that NOT actually means NAND. TARGET is an optional
5203 place for us to store the results; AFTER is true if this is the
5204 fetch_and_xxx form. */
5205
5206 static rtx
5207 expand_builtin_sync_operation (machine_mode mode, tree exp,
5208 enum rtx_code code, bool after,
5209 rtx target)
5210 {
5211 rtx val, mem;
5212 location_t loc = EXPR_LOCATION (exp);
5213
5214 if (code == NOT && warn_sync_nand)
5215 {
5216 tree fndecl = get_callee_fndecl (exp);
5217 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5218
5219 static bool warned_f_a_n, warned_n_a_f;
5220
5221 switch (fcode)
5222 {
5223 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5224 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5225 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5226 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5227 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5228 if (warned_f_a_n)
5229 break;
5230
5231 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5232 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5233 warned_f_a_n = true;
5234 break;
5235
5236 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5237 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5238 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5239 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5240 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5241 if (warned_n_a_f)
5242 break;
5243
5244 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5245 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5246 warned_n_a_f = true;
5247 break;
5248
5249 default:
5250 gcc_unreachable ();
5251 }
5252 }
5253
5254 /* Expand the operands. */
5255 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5256 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5257
5258 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5259 after);
5260 }
5261
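/* Editorial sketch of the semantic change warned about above: before
   GCC 4.4, __sync_fetch_and_nand stored ~old & val; from 4.4 on it
   stores the true NAND, ~(old & val).  Non-atomic equivalent: */
#if 0
static int
example_fetch_and_nand (int *ptr, int val)
{
  int old = *ptr;
  *ptr = ~(old & val);  /* GCC >= 4.4; formerly ~old & val.  */
  return old;           /* The fetch_and_... forms return the old value.  */
}
#endif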
5262 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5263 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5264 true if this is the boolean form. TARGET is a place for us to store the
5265 results; this is NOT optional if IS_BOOL is true. */
5266
5267 static rtx
5268 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5269 bool is_bool, rtx target)
5270 {
5271 rtx old_val, new_val, mem;
5272 rtx *pbool, *poval;
5273
5274 /* Expand the operands. */
5275 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5276 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5277 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5278
5279 pbool = poval = NULL;
5280 if (target != const0_rtx)
5281 {
5282 if (is_bool)
5283 pbool = &target;
5284 else
5285 poval = &target;
5286 }
5287 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5288 false, MEMMODEL_SEQ_CST,
5289 MEMMODEL_SEQ_CST))
5290 return NULL_RTX;
5291
5292 return target;
5293 }
5294
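/* Editorial sketch of the two source-level forms handled here: */
#if 0
static int example_word;

static void
example_cas_forms (void)
{
  /* Boolean form: the result says whether the swap happened.  */
  if (__sync_bool_compare_and_swap (&example_word, 0, 1))
    {
      /* We won the 0 -> 1 transition.  */
    }

  /* Value form: the result is the prior contents, match or not.  */
  int old = __sync_val_compare_and_swap (&example_word, 1, 2);
  (void) old;
}
#endif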
5295 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5296 general form is actually an atomic exchange, and some targets only
5297 support a reduced form with the second argument being a constant 1.
5298 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5299 the results. */
5300
5301 static rtx
5302 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5303 rtx target)
5304 {
5305 rtx val, mem;
5306
5307 /* Expand the operands. */
5308 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5309 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5310
5311 return expand_sync_lock_test_and_set (target, mem, val);
5312 }
5313
5314 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5315
5316 static void
5317 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5318 {
5319 rtx mem;
5320
5321 /* Expand the operands. */
5322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5323
5324 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5325 }
5326
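/* Editorial sketch: together the two expanders above are enough for a
   simple spinlock, which is the usage the reduced (constant-1) form of
   test_and_set is designed for.  */
#if 0
static int example_lock;  /* 0 = free, nonzero = held.  */

static void
example_acquire (void)
{
  /* Atomic exchange with acquire semantics; spins while held.  */
  while (__sync_lock_test_and_set (&example_lock, 1))
    continue;
}

static void
example_release (void)
{
  /* Expands to an atomic store of 0 with release semantics.  */
  __sync_lock_release (&example_lock);
}
#endif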
5327 /* Given an integer representing an ``enum memmodel'', verify its
5328 correctness and return the memory model enum. */
5329
5330 static enum memmodel
5331 get_memmodel (tree exp)
5332 {
5333 rtx op;
5334 unsigned HOST_WIDE_INT val;
5335
5336 /* If the parameter is not a constant, it's a run time value so we'll just
5337 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5338 if (TREE_CODE (exp) != INTEGER_CST)
5339 return MEMMODEL_SEQ_CST;
5340
5341 op = expand_normal (exp);
5342
5343 val = INTVAL (op);
5344 if (targetm.memmodel_check)
5345 val = targetm.memmodel_check (val);
5346 else if (val & ~MEMMODEL_MASK)
5347 {
5348       warning (OPT_Winvalid_memory_model,
5349 	       "unknown architecture specifier in memory model to builtin");
5350 return MEMMODEL_SEQ_CST;
5351 }
5352
5353 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5354 {
5355 warning (OPT_Winvalid_memory_model,
5356 "invalid memory model argument to builtin");
5357 return MEMMODEL_SEQ_CST;
5358 }
5359
5360 return (enum memmodel) val;
5361 }
5362
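/* Editorial example: the argument is normally one of the __ATOMIC_*
   constants (RELAXED 0, CONSUME 1, ACQUIRE 2, RELEASE 3, ACQ_REL 4,
   SEQ_CST 5); a value not known at compile time is pessimized to
   SEQ_CST as described above.  */
#if 0
static int
example_load (int *p, int runtime_model)
{
  int a = __atomic_load_n (p, __ATOMIC_ACQUIRE);  /* Constant: honored.  */
  int b = __atomic_load_n (p, runtime_model);     /* Treated as SEQ_CST.  */
  return a + b;
}
#endif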
5363 /* Expand the __atomic_exchange intrinsic:
5364 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5365 EXP is the CALL_EXPR.
5366 TARGET is an optional place for us to store the results. */
5367
5368 static rtx
5369 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5370 {
5371 rtx val, mem;
5372 enum memmodel model;
5373
5374 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5375 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5376 {
5377 error ("invalid memory model for %<__atomic_exchange%>");
5378 return NULL_RTX;
5379 }
5380
5381 if (!flag_inline_atomics)
5382 return NULL_RTX;
5383
5384 /* Expand the operands. */
5385 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5386 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5387
5388 return expand_atomic_exchange (target, mem, val, model);
5389 }
5390
5391 /* Expand the __atomic_compare_exchange intrinsic:
5392 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5393 TYPE desired, BOOL weak,
5394 enum memmodel success,
5395 enum memmodel failure)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results. */
5398
5399 static rtx
5400 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5401 rtx target)
5402 {
5403 rtx expect, desired, mem, oldval;
5404 rtx_code_label *label;
5405 enum memmodel success, failure;
5406 tree weak;
5407 bool is_weak;
5408
5409 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5410 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5411
5412 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5413 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5414 {
5415 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5416 return NULL_RTX;
5417 }
5418
5419 if (failure > success)
5420 {
5421 error ("failure memory model cannot be stronger than success "
5422 "memory model for %<__atomic_compare_exchange%>");
5423 return NULL_RTX;
5424 }
5425
5426 if (!flag_inline_atomics)
5427 return NULL_RTX;
5428
5429 /* Expand the operands. */
5430 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5431
5432 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5433 expect = convert_memory_address (Pmode, expect);
5434 expect = gen_rtx_MEM (mode, expect);
5435 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5436
5437 weak = CALL_EXPR_ARG (exp, 3);
5438 is_weak = false;
5439 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5440 is_weak = true;
5441
5442 if (target == const0_rtx)
5443 target = NULL;
5444
5445   /* Lest the rtl backend create a race condition with an improper store
5446      to memory, always create a new pseudo for OLDVAL.  */
5447 oldval = NULL;
5448
5449 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5450 is_weak, success, failure))
5451 return NULL_RTX;
5452
5453 /* Conditionally store back to EXPECT, lest we create a race condition
5454 with an improper store to memory. */
5455 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5456 the normal case where EXPECT is totally private, i.e. a register. At
5457 which point the store can be unconditional. */
5458 label = gen_label_rtx ();
5459 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5460 emit_move_insn (expect, oldval);
5461 emit_label (label);
5462
5463 return target;
5464 }
5465
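/* Editorial sketch: the conditional store-back above implements the C11
   rule that a failed compare-exchange updates *EXPECT with the observed
   value, which is what makes retry loops like this one work: */
#if 0
static void
example_atomic_increment (int *obj)
{
  int expected = __atomic_load_n (obj, __ATOMIC_RELAXED);
  /* Each failure refreshes EXPECTED from memory; no reload needed.  */
  while (!__atomic_compare_exchange_n (obj, &expected, expected + 1,
                                       0 /* strong */, __ATOMIC_SEQ_CST,
                                       __ATOMIC_RELAXED))
    continue;
}
#endif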
5466 /* Expand the __atomic_load intrinsic:
5467 TYPE __atomic_load (TYPE *object, enum memmodel)
5468 EXP is the CALL_EXPR.
5469 TARGET is an optional place for us to store the results. */
5470
5471 static rtx
5472 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5473 {
5474 rtx mem;
5475 enum memmodel model;
5476
5477 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5478 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5479 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5480 {
5481 error ("invalid memory model for %<__atomic_load%>");
5482 return NULL_RTX;
5483 }
5484
5485 if (!flag_inline_atomics)
5486 return NULL_RTX;
5487
5488 /* Expand the operand. */
5489 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5490
5491 return expand_atomic_load (target, mem, model);
5492 }
5493
5494
5495 /* Expand the __atomic_store intrinsic:
5496 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5497    EXP is the CALL_EXPR.  */
5499
5500 static rtx
5501 expand_builtin_atomic_store (machine_mode mode, tree exp)
5502 {
5503 rtx mem, val;
5504 enum memmodel model;
5505
5506 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5507 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5508 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5509 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5510 {
5511 error ("invalid memory model for %<__atomic_store%>");
5512 return NULL_RTX;
5513 }
5514
5515 if (!flag_inline_atomics)
5516 return NULL_RTX;
5517
5518 /* Expand the operands. */
5519 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5520 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5521
5522 return expand_atomic_store (mem, val, model, false);
5523 }
5524
5525 /* Expand the __atomic_fetch_XXX intrinsic:
5526 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5527 EXP is the CALL_EXPR.
5528 TARGET is an optional place for us to store the results.
5529    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (which
5530    here means NAND).  FETCH_AFTER is true if the result of the operation
5531    is returned, false if the value before the operation is returned.
5532 IGNORE is true if the result is not used.
5533 EXT_CALL is the correct builtin for an external call if this cannot be
5534 resolved to an instruction sequence. */
5535
5536 static rtx
5537 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5538 enum rtx_code code, bool fetch_after,
5539 bool ignore, enum built_in_function ext_call)
5540 {
5541 rtx val, mem, ret;
5542 enum memmodel model;
5543 tree fndecl;
5544 tree addr;
5545
5546 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5547
5548 /* Expand the operands. */
5549 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5550 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5551
5552 /* Only try generating instructions if inlining is turned on. */
5553 if (flag_inline_atomics)
5554 {
5555 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5556 if (ret)
5557 return ret;
5558 }
5559
5560   /* If there is no specialized library routine, let the caller emit a
       normal call.  */
5561 if (ext_call == BUILT_IN_NONE)
5562 return NULL_RTX;
5563
5564 /* Change the call to the specified function. */
5565 fndecl = get_callee_fndecl (exp);
5566 addr = CALL_EXPR_FN (exp);
5567 STRIP_NOPS (addr);
5568
5569 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5570 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5571
5572 /* Expand the call here so we can emit trailing code. */
5573 ret = expand_call (exp, target, ignore);
5574
5575 /* Replace the original function just in case it matters. */
5576 TREE_OPERAND (addr, 0) = fndecl;
5577
5578 /* Then issue the arithmetic correction to return the right result. */
5579 if (!ignore)
5580 {
5581 if (code == NOT)
5582 {
5583 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5584 OPTAB_LIB_WIDEN);
5585 ret = expand_simple_unop (mode, NOT, ret, target, true);
5586 }
5587 else
5588 ret = expand_simple_binop (mode, code, ret, val, target, true,
5589 OPTAB_LIB_WIDEN);
5590 }
5591 return ret;
5592 }
5593
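/* Editorial sketch of the trailing correction: when only the
   fetch-before library entry point exists, the OP_fetch (after) result
   is recomputed from the returned old value, with NAND needing the
   extra complement emitted above.  */
#if 0
static int
example_add_fetch (int *p, int val)
{
  int old = __atomic_fetch_add (p, val, __ATOMIC_SEQ_CST);
  return old + val;             /* PLUS correction.  */
}

static int
example_nand_fetch (int *p, int val)
{
  int old = __atomic_fetch_nand (p, val, __ATOMIC_SEQ_CST);
  return ~(old & val);          /* NOT means NAND: AND, then complement.  */
}
#endif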
5594
5595 #ifndef HAVE_atomic_clear
5596 # define HAVE_atomic_clear 0
5597 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5598 #endif
5599
5600 /* Expand an atomic clear operation.
5601     void __atomic_clear (BOOL *obj, enum memmodel)
5602 EXP is the call expression. */
5603
5604 static rtx
5605 expand_builtin_atomic_clear (tree exp)
5606 {
5607 machine_mode mode;
5608 rtx mem, ret;
5609 enum memmodel model;
5610
5611 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5612 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5613 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5614
5615 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5616 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5617 {
5618       error ("invalid memory model for %<__atomic_clear%>");
5619 return const0_rtx;
5620 }
5621
5622 if (HAVE_atomic_clear)
5623 {
5624 emit_insn (gen_atomic_clear (mem, model));
5625 return const0_rtx;
5626 }
5627
5628   /* Try issuing an atomic store, allowing a fallback to the target's
5629      sync_lock_release pattern.  The only way this can
5630      fail is if the bool type is larger than a word size.  Unlikely, but
5631      handle it anyway for completeness.  Assume a single threaded model since
5632      there is no atomic support in this case, and no barriers are required.  */
5633 ret = expand_atomic_store (mem, const0_rtx, model, true);
5634 if (!ret)
5635 emit_move_insn (mem, const0_rtx);
5636 return const0_rtx;
5637 }
5638
5639 /* Expand an atomic test_and_set operation.
5640      bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5641 EXP is the call expression. */
5642
5643 static rtx
5644 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5645 {
5646 rtx mem;
5647 enum memmodel model;
5648 machine_mode mode;
5649
5650 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5651 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5652 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5653
5654 return expand_atomic_test_and_set (target, mem, model);
5655 }
5656
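/* Editorial sketch: the pair of expanders above covers the C11
   atomic_flag operations, e.g. a flag-based lock with explicit memory
   models (note __atomic_clear rejects acquire models, as checked
   above): */
#if 0
static unsigned char example_guard;  /* bool-sized object.  */

static void
example_guarded_section (void)
{
  /* Returns the previous contents; spins while the flag was set.  */
  while (__atomic_test_and_set (&example_guard, __ATOMIC_ACQUIRE))
    continue;
  /* ... critical section ...  */
  __atomic_clear (&example_guard, __ATOMIC_RELEASE);
}
#endif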
5657
5658 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5659 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5660
5661 static tree
5662 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5663 {
5664 int size;
5665 machine_mode mode;
5666 unsigned int mode_align, type_align;
5667
5668 if (TREE_CODE (arg0) != INTEGER_CST)
5669 return NULL_TREE;
5670
5671 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5672 mode = mode_for_size (size, MODE_INT, 0);
5673 mode_align = GET_MODE_ALIGNMENT (mode);
5674
5675 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5676 type_align = mode_align;
5677 else
5678 {
5679 tree ttype = TREE_TYPE (arg1);
5680
5681 /* This function is usually invoked and folded immediately by the front
5682 end before anything else has a chance to look at it. The pointer
5683 parameter at this point is usually cast to a void *, so check for that
5684 and look past the cast. */
5685 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5686 && VOID_TYPE_P (TREE_TYPE (ttype)))
5687 arg1 = TREE_OPERAND (arg1, 0);
5688
5689 ttype = TREE_TYPE (arg1);
5690 gcc_assert (POINTER_TYPE_P (ttype));
5691
5692 /* Get the underlying type of the object. */
5693 ttype = TREE_TYPE (ttype);
5694 type_align = TYPE_ALIGN (ttype);
5695 }
5696
5697   /* If the object has smaller alignment, the lock free routines cannot
5698      be used.  */
5699 if (type_align < mode_align)
5700 return boolean_false_node;
5701
5702 /* Check if a compare_and_swap pattern exists for the mode which represents
5703 the required size. The pattern is not allowed to fail, so the existence
5704 of the pattern indicates support is present. */
5705 if (can_compare_and_swap_p (mode, true))
5706 return boolean_true_node;
5707 else
5708 return boolean_false_node;
5709 }
5710
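/* Editorial example: because the folder above needs only the constant
   size and static alignment, the builtin can select an implementation
   at compile time: */
#if 0
static int
example_is_fast_path (int *p)
{
  /* Folds to 1 when a CAS pattern exists for the 32-bit mode and the
     object is sufficiently aligned; 0 otherwise.  */
  return __atomic_always_lock_free (sizeof *p, p);
}
#endif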
5711 /* Expand __atomic_always_lock_free: return const1_rtx if the parameters
5712    to call EXP describe an object which will always be operated on by
5713    lock free instructions, and const0_rtx otherwise.  The first argument
5714    is the size of the object, and the second is a pointer to the object
5715    itself; if NULL is passed for the object, the result is based on
5716    typical alignment for an object of the specified size.  */
5717
5718 static rtx
5719 expand_builtin_atomic_always_lock_free (tree exp)
5720 {
5721 tree size;
5722 tree arg0 = CALL_EXPR_ARG (exp, 0);
5723 tree arg1 = CALL_EXPR_ARG (exp, 1);
5724
5725 if (TREE_CODE (arg0) != INTEGER_CST)
5726 {
5727 error ("non-constant argument 1 to __atomic_always_lock_free");
5728 return const0_rtx;
5729 }
5730
5731 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5732 if (size == boolean_true_node)
5733 return const1_rtx;
5734 return const0_rtx;
5735 }
5736
5737 /* Return boolean_true_node if object ARG1 of size ARG0 is known to be
5738    lock free on this architecture, otherwise NULL_TREE.  */
5739
5740 static tree
5741 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5742 {
5743 if (!flag_inline_atomics)
5744 return NULL_TREE;
5745
5746 /* If it isn't always lock free, don't generate a result. */
5747 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5748 return boolean_true_node;
5749
5750 return NULL_TREE;
5751 }
5752
5753 /* Expand __atomic_is_lock_free: return const1_rtx if it can be proved
5754    at compile time that the object is lock free on this architecture.
5755    The first argument is the size of the object, and the second is a
5756    pointer to the object itself; if NULL is passed for the object, the
5757    result is based on typical alignment for an object of the specified
5758    size.  Otherwise return NULL_RTX and let the runtime library decide.  */
5759
5760 static rtx
5761 expand_builtin_atomic_is_lock_free (tree exp)
5762 {
5763 tree size;
5764 tree arg0 = CALL_EXPR_ARG (exp, 0);
5765 tree arg1 = CALL_EXPR_ARG (exp, 1);
5766
5767 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5768 {
5769 error ("non-integer argument 1 to __atomic_is_lock_free");
5770 return NULL_RTX;
5771 }
5772
5773 if (!flag_inline_atomics)
5774 return NULL_RTX;
5775
5776 /* If the value is known at compile time, return the RTX for it. */
5777 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5778 if (size == boolean_true_node)
5779 return const1_rtx;
5780
5781 return NULL_RTX;
5782 }
5783
5784 /* Expand the __atomic_thread_fence intrinsic:
5785 void __atomic_thread_fence (enum memmodel)
5786 EXP is the CALL_EXPR. */
5787
5788 static void
5789 expand_builtin_atomic_thread_fence (tree exp)
5790 {
5791 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5792 expand_mem_thread_fence (model);
5793 }
5794
5795 /* Expand the __atomic_signal_fence intrinsic:
5796 void __atomic_signal_fence (enum memmodel)
5797 EXP is the CALL_EXPR. */
5798
5799 static void
5800 expand_builtin_atomic_signal_fence (tree exp)
5801 {
5802 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5803 expand_mem_signal_fence (model);
5804 }
5805
5806 /* Expand the __sync_synchronize intrinsic. */
5807
5808 static void
5809 expand_builtin_sync_synchronize (void)
5810 {
5811 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5812 }
5813
5814 static rtx
5815 expand_builtin_thread_pointer (tree exp, rtx target)
5816 {
5817 enum insn_code icode;
5818 if (!validate_arglist (exp, VOID_TYPE))
5819 return const0_rtx;
5820 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5821 if (icode != CODE_FOR_nothing)
5822 {
5823 struct expand_operand op;
5824       /* If the target is not suitable then create a new target. */
5825 if (target == NULL_RTX
5826 || !REG_P (target)
5827 || GET_MODE (target) != Pmode)
5828 target = gen_reg_rtx (Pmode);
5829 create_output_operand (&op, target, Pmode);
5830 expand_insn (icode, 1, &op);
5831 return target;
5832 }
5833 error ("__builtin_thread_pointer is not supported on this target");
5834 return const0_rtx;
5835 }
5836
5837 static void
5838 expand_builtin_set_thread_pointer (tree exp)
5839 {
5840 enum insn_code icode;
5841 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5842 return;
5843 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5844 if (icode != CODE_FOR_nothing)
5845 {
5846 struct expand_operand op;
5847 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5848 Pmode, EXPAND_NORMAL);
5849 create_input_operand (&op, val, Pmode);
5850 expand_insn (icode, 1, &op);
5851 return;
5852 }
5853 error ("__builtin_set_thread_pointer is not supported on this target");
5854 }
5855
5856 \f
5857 /* Emit code to restore the stack pointer from the value saved in VAR.  */
5858
5859 static void
5860 expand_stack_restore (tree var)
5861 {
5862 rtx_insn *prev;
5863 rtx sa = expand_normal (var);
5864
5865 sa = convert_memory_address (Pmode, sa);
5866
5867 prev = get_last_insn ();
5868 emit_stack_restore (SAVE_BLOCK, sa);
5869 fixup_args_size_notes (prev, get_last_insn (), 0);
5870 }
5871
5872
5873 /* Emit code to save the current value of the stack pointer.  */
5874
5875 static rtx
5876 expand_stack_save (void)
5877 {
5878 rtx ret = NULL_RTX;
5879
5880 do_pending_stack_adjust ();
5881 emit_stack_save (SAVE_BLOCK, &ret);
5882 return ret;
5883 }
5884
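/* Editorial sketch: the classic client of this pair is a variable
   length array, whose enclosing scope is bracketed with a save and a
   restore so the VLA's storage is reclaimed on scope exit: */
#if 0
static void
example_vla (int n)
{
  {                     /* conceptually: sp = __builtin_stack_save ()  */
    char buf[n];
    buf[0] = 0;
  }                     /* conceptually: __builtin_stack_restore (sp)  */
}
#endif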
5885 /* Expand an expression EXP that calls a built-in function,
5886 with result going to TARGET if that's convenient
5887 (and in mode MODE if that's convenient).
5888 SUBTARGET may be used as the target for computing one of EXP's operands.
5889 IGNORE is nonzero if the value is to be ignored. */
5890
5891 rtx
5892 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5893 int ignore)
5894 {
5895 tree fndecl = get_callee_fndecl (exp);
5896 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5897 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5898 int flags;
5899
5900   /* When ASan is enabled, we don't want to expand some memory/string
5901      builtins; instead we rely on libsanitizer's hooks.  This allows us to
5902      avoid redundant checks and be sure that possible overflows will be
5903      detected by ASan.  */
5904
5905 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5906 return expand_call (exp, target, ignore);
5907
5908 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5909 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5910
5911 /* When not optimizing, generate calls to library functions for a certain
5912 set of builtins. */
5913 if (!optimize
5914 && !called_as_built_in (fndecl)
5915 && fcode != BUILT_IN_FORK
5916 && fcode != BUILT_IN_EXECL
5917 && fcode != BUILT_IN_EXECV
5918 && fcode != BUILT_IN_EXECLP
5919 && fcode != BUILT_IN_EXECLE
5920 && fcode != BUILT_IN_EXECVP
5921 && fcode != BUILT_IN_EXECVE
5922 && fcode != BUILT_IN_ALLOCA
5923 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5924 && fcode != BUILT_IN_FREE
5925 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5926 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5927 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5928 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5929 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5930 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5931 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5932 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5933 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5934 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5935 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5936 && fcode != BUILT_IN_CHKP_BNDRET)
5937 return expand_call (exp, target, ignore);
5938
5939 /* The built-in function expanders test for target == const0_rtx
5940 to determine whether the function's result will be ignored. */
5941 if (ignore)
5942 target = const0_rtx;
5943
5944 /* If the result of a pure or const built-in function is ignored, and
5945 none of its arguments are volatile, we can avoid expanding the
5946 built-in call and just evaluate the arguments for side-effects. */
5947 if (target == const0_rtx
5948 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5949 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5950 {
5951 bool volatilep = false;
5952 tree arg;
5953 call_expr_arg_iterator iter;
5954
5955 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5956 if (TREE_THIS_VOLATILE (arg))
5957 {
5958 volatilep = true;
5959 break;
5960 }
5961
5962 if (! volatilep)
5963 {
5964 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5965 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5966 return const0_rtx;
5967 }
5968 }
5969
5970 /* expand_builtin_with_bounds is supposed to be used for
5971 instrumented builtin calls. */
5972 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5973
5974 switch (fcode)
5975 {
5976 CASE_FLT_FN (BUILT_IN_FABS):
5977 case BUILT_IN_FABSD32:
5978 case BUILT_IN_FABSD64:
5979 case BUILT_IN_FABSD128:
5980 target = expand_builtin_fabs (exp, target, subtarget);
5981 if (target)
5982 return target;
5983 break;
5984
5985 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5986 target = expand_builtin_copysign (exp, target, subtarget);
5987 if (target)
5988 return target;
5989 break;
5990
5991 /* Just do a normal library call if we were unable to fold
5992 the values. */
5993 CASE_FLT_FN (BUILT_IN_CABS):
5994 break;
5995
5996 CASE_FLT_FN (BUILT_IN_EXP):
5997 CASE_FLT_FN (BUILT_IN_EXP10):
5998 CASE_FLT_FN (BUILT_IN_POW10):
5999 CASE_FLT_FN (BUILT_IN_EXP2):
6000 CASE_FLT_FN (BUILT_IN_EXPM1):
6001 CASE_FLT_FN (BUILT_IN_LOGB):
6002 CASE_FLT_FN (BUILT_IN_LOG):
6003 CASE_FLT_FN (BUILT_IN_LOG10):
6004 CASE_FLT_FN (BUILT_IN_LOG2):
6005 CASE_FLT_FN (BUILT_IN_LOG1P):
6006 CASE_FLT_FN (BUILT_IN_TAN):
6007 CASE_FLT_FN (BUILT_IN_ASIN):
6008 CASE_FLT_FN (BUILT_IN_ACOS):
6009 CASE_FLT_FN (BUILT_IN_ATAN):
6010 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6011 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6012 because of possible accuracy problems. */
6013 if (! flag_unsafe_math_optimizations)
6014 break;
6015 CASE_FLT_FN (BUILT_IN_SQRT):
6016 CASE_FLT_FN (BUILT_IN_FLOOR):
6017 CASE_FLT_FN (BUILT_IN_CEIL):
6018 CASE_FLT_FN (BUILT_IN_TRUNC):
6019 CASE_FLT_FN (BUILT_IN_ROUND):
6020 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6021 CASE_FLT_FN (BUILT_IN_RINT):
6022 target = expand_builtin_mathfn (exp, target, subtarget);
6023 if (target)
6024 return target;
6025 break;
6026
6027 CASE_FLT_FN (BUILT_IN_FMA):
6028 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6029 if (target)
6030 return target;
6031 break;
6032
6033 CASE_FLT_FN (BUILT_IN_ILOGB):
6034 if (! flag_unsafe_math_optimizations)
6035 break;
6036 CASE_FLT_FN (BUILT_IN_ISINF):
6037 CASE_FLT_FN (BUILT_IN_FINITE):
6038 case BUILT_IN_ISFINITE:
6039 case BUILT_IN_ISNORMAL:
6040 target = expand_builtin_interclass_mathfn (exp, target);
6041 if (target)
6042 return target;
6043 break;
6044
6045 CASE_FLT_FN (BUILT_IN_ICEIL):
6046 CASE_FLT_FN (BUILT_IN_LCEIL):
6047 CASE_FLT_FN (BUILT_IN_LLCEIL):
6048 CASE_FLT_FN (BUILT_IN_LFLOOR):
6049 CASE_FLT_FN (BUILT_IN_IFLOOR):
6050 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6051 target = expand_builtin_int_roundingfn (exp, target);
6052 if (target)
6053 return target;
6054 break;
6055
6056 CASE_FLT_FN (BUILT_IN_IRINT):
6057 CASE_FLT_FN (BUILT_IN_LRINT):
6058 CASE_FLT_FN (BUILT_IN_LLRINT):
6059 CASE_FLT_FN (BUILT_IN_IROUND):
6060 CASE_FLT_FN (BUILT_IN_LROUND):
6061 CASE_FLT_FN (BUILT_IN_LLROUND):
6062 target = expand_builtin_int_roundingfn_2 (exp, target);
6063 if (target)
6064 return target;
6065 break;
6066
6067 CASE_FLT_FN (BUILT_IN_POWI):
6068 target = expand_builtin_powi (exp, target);
6069 if (target)
6070 return target;
6071 break;
6072
6073 CASE_FLT_FN (BUILT_IN_ATAN2):
6074 CASE_FLT_FN (BUILT_IN_LDEXP):
6075 CASE_FLT_FN (BUILT_IN_SCALB):
6076 CASE_FLT_FN (BUILT_IN_SCALBN):
6077 CASE_FLT_FN (BUILT_IN_SCALBLN):
6078 if (! flag_unsafe_math_optimizations)
6079 break;
6080
6081 CASE_FLT_FN (BUILT_IN_FMOD):
6082 CASE_FLT_FN (BUILT_IN_REMAINDER):
6083 CASE_FLT_FN (BUILT_IN_DREM):
6084 CASE_FLT_FN (BUILT_IN_POW):
6085 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_FLT_FN (BUILT_IN_CEXPI):
6091 target = expand_builtin_cexpi (exp, target);
6092 gcc_assert (target);
6093 return target;
6094
6095 CASE_FLT_FN (BUILT_IN_SIN):
6096 CASE_FLT_FN (BUILT_IN_COS):
6097 if (! flag_unsafe_math_optimizations)
6098 break;
6099 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6100 if (target)
6101 return target;
6102 break;
6103
6104 CASE_FLT_FN (BUILT_IN_SINCOS):
6105 if (! flag_unsafe_math_optimizations)
6106 break;
6107 target = expand_builtin_sincos (exp);
6108 if (target)
6109 return target;
6110 break;
6111
6112 case BUILT_IN_APPLY_ARGS:
6113 return expand_builtin_apply_args ();
6114
6115 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6116 FUNCTION with a copy of the parameters described by
6117 ARGUMENTS, and ARGSIZE. It returns a block of memory
6118 allocated on the stack into which is stored all the registers
6119 that might possibly be used for returning the result of a
6120 function. ARGUMENTS is the value returned by
6121 __builtin_apply_args. ARGSIZE is the number of bytes of
6122 arguments that must be copied. ??? How should this value be
6123 computed? We'll also need a safe worst case value for varargs
6124 functions. */
6125 case BUILT_IN_APPLY:
6126 if (!validate_arglist (exp, POINTER_TYPE,
6127 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6128 && !validate_arglist (exp, REFERENCE_TYPE,
6129 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6130 return const0_rtx;
6131 else
6132 {
6133 rtx ops[3];
6134
6135 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6136 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6137 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6138
6139 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6140 }
6141
6142 /* __builtin_return (RESULT) causes the function to return the
6143 value described by RESULT. RESULT is address of the block of
6144 memory returned by __builtin_apply. */
6145 case BUILT_IN_RETURN:
6146 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6147 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6148 return const0_rtx;
6149
6150 case BUILT_IN_SAVEREGS:
6151 return expand_builtin_saveregs ();
6152
6153 case BUILT_IN_VA_ARG_PACK:
6154 /* All valid uses of __builtin_va_arg_pack () are removed during
6155 inlining. */
6156 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6157 return const0_rtx;
6158
6159 case BUILT_IN_VA_ARG_PACK_LEN:
6160 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6161 inlining. */
6162 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6163 return const0_rtx;
6164
6165 /* Return the address of the first anonymous stack arg. */
6166 case BUILT_IN_NEXT_ARG:
6167 if (fold_builtin_next_arg (exp, false))
6168 return const0_rtx;
6169 return expand_builtin_next_arg ();
6170
6171 case BUILT_IN_CLEAR_CACHE:
6172 target = expand_builtin___clear_cache (exp);
6173 if (target)
6174 return target;
6175 break;
6176
6177 case BUILT_IN_CLASSIFY_TYPE:
6178 return expand_builtin_classify_type (exp);
6179
6180 case BUILT_IN_CONSTANT_P:
6181 return const0_rtx;
6182
6183 case BUILT_IN_FRAME_ADDRESS:
6184 case BUILT_IN_RETURN_ADDRESS:
6185 return expand_builtin_frame_address (fndecl, exp);
6186
6187 /* Returns the address of the area where the structure is returned.
6188 0 otherwise. */
6189 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6190 if (call_expr_nargs (exp) != 0
6191 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6192 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6193 return const0_rtx;
6194 else
6195 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6196
6197 case BUILT_IN_ALLOCA:
6198 case BUILT_IN_ALLOCA_WITH_ALIGN:
6199 /* If the allocation stems from the declaration of a variable-sized
6200 object, it cannot accumulate. */
6201 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6202 if (target)
6203 return target;
6204 break;
6205
6206 case BUILT_IN_STACK_SAVE:
6207 return expand_stack_save ();
6208
6209 case BUILT_IN_STACK_RESTORE:
6210 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6211 return const0_rtx;
6212
6213 case BUILT_IN_BSWAP16:
6214 case BUILT_IN_BSWAP32:
6215 case BUILT_IN_BSWAP64:
6216 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6217 if (target)
6218 return target;
6219 break;
6220
6221 CASE_INT_FN (BUILT_IN_FFS):
6222 target = expand_builtin_unop (target_mode, exp, target,
6223 subtarget, ffs_optab);
6224 if (target)
6225 return target;
6226 break;
6227
6228 CASE_INT_FN (BUILT_IN_CLZ):
6229 target = expand_builtin_unop (target_mode, exp, target,
6230 subtarget, clz_optab);
6231 if (target)
6232 return target;
6233 break;
6234
6235 CASE_INT_FN (BUILT_IN_CTZ):
6236 target = expand_builtin_unop (target_mode, exp, target,
6237 subtarget, ctz_optab);
6238 if (target)
6239 return target;
6240 break;
6241
6242 CASE_INT_FN (BUILT_IN_CLRSB):
6243 target = expand_builtin_unop (target_mode, exp, target,
6244 subtarget, clrsb_optab);
6245 if (target)
6246 return target;
6247 break;
6248
6249 CASE_INT_FN (BUILT_IN_POPCOUNT):
6250 target = expand_builtin_unop (target_mode, exp, target,
6251 subtarget, popcount_optab);
6252 if (target)
6253 return target;
6254 break;
6255
6256 CASE_INT_FN (BUILT_IN_PARITY):
6257 target = expand_builtin_unop (target_mode, exp, target,
6258 subtarget, parity_optab);
6259 if (target)
6260 return target;
6261 break;
6262
6263 case BUILT_IN_STRLEN:
6264 target = expand_builtin_strlen (exp, target, target_mode);
6265 if (target)
6266 return target;
6267 break;
6268
6269 case BUILT_IN_STRCPY:
6270 target = expand_builtin_strcpy (exp, target);
6271 if (target)
6272 return target;
6273 break;
6274
6275 case BUILT_IN_STRNCPY:
6276 target = expand_builtin_strncpy (exp, target);
6277 if (target)
6278 return target;
6279 break;
6280
6281 case BUILT_IN_STPCPY:
6282 target = expand_builtin_stpcpy (exp, target, mode);
6283 if (target)
6284 return target;
6285 break;
6286
6287 case BUILT_IN_MEMCPY:
6288 target = expand_builtin_memcpy (exp, target);
6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_MEMPCPY:
6294 target = expand_builtin_mempcpy (exp, target, mode);
6295 if (target)
6296 return target;
6297 break;
6298
6299 case BUILT_IN_MEMSET:
6300 target = expand_builtin_memset (exp, target, mode);
6301 if (target)
6302 return target;
6303 break;
6304
6305 case BUILT_IN_BZERO:
6306 target = expand_builtin_bzero (exp);
6307 if (target)
6308 return target;
6309 break;
6310
6311 case BUILT_IN_STRCMP:
6312 target = expand_builtin_strcmp (exp, target);
6313 if (target)
6314 return target;
6315 break;
6316
6317 case BUILT_IN_STRNCMP:
6318 target = expand_builtin_strncmp (exp, target, mode);
6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_BCMP:
6324 case BUILT_IN_MEMCMP:
6325 target = expand_builtin_memcmp (exp, target, mode);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_SETJMP:
6331 /* This should have been lowered to the builtins below. */
6332 gcc_unreachable ();
6333
6334 case BUILT_IN_SETJMP_SETUP:
6335 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6336 and the receiver label. */
6337 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6338 {
6339 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6340 VOIDmode, EXPAND_NORMAL);
6341 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6342 rtx label_r = label_rtx (label);
6343
6344 /* This is copied from the handling of non-local gotos. */
6345 expand_builtin_setjmp_setup (buf_addr, label_r);
6346 nonlocal_goto_handler_labels
6347 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6348 nonlocal_goto_handler_labels);
6349 /* ??? Do not let expand_label treat us as such since we would
6350 not want to be both on the list of non-local labels and on
6351 the list of forced labels. */
6352 FORCED_LABEL (label) = 0;
6353 return const0_rtx;
6354 }
6355 break;
6356
6357 case BUILT_IN_SETJMP_RECEIVER:
6358 /* __builtin_setjmp_receiver is passed the receiver label. */
6359 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6360 {
6361 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6362 rtx label_r = label_rtx (label);
6363
6364 expand_builtin_setjmp_receiver (label_r);
6365 return const0_rtx;
6366 }
6367 break;
6368
6369 /* __builtin_longjmp is passed a pointer to an array of five words.
6370 It's similar to the C library longjmp function but works with
6371 __builtin_setjmp above. */
6372 case BUILT_IN_LONGJMP:
6373 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6374 {
6375 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6376 VOIDmode, EXPAND_NORMAL);
6377 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6378
6379 if (value != const1_rtx)
6380 {
6381 error ("%<__builtin_longjmp%> second argument must be 1");
6382 return const0_rtx;
6383 }
6384
6385 expand_builtin_longjmp (buf_addr, value);
6386 return const0_rtx;
6387 }
6388 break;
6389
6390 case BUILT_IN_NONLOCAL_GOTO:
6391 target = expand_builtin_nonlocal_goto (exp);
6392 if (target)
6393 return target;
6394 break;
6395
6396 /* This updates the setjmp buffer that is its argument with the value
6397 of the current stack pointer. */
6398 case BUILT_IN_UPDATE_SETJMP_BUF:
6399 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6400 {
6401 rtx buf_addr
6402 = expand_normal (CALL_EXPR_ARG (exp, 0));
6403
6404 expand_builtin_update_setjmp_buf (buf_addr);
6405 return const0_rtx;
6406 }
6407 break;
6408
6409 case BUILT_IN_TRAP:
6410 expand_builtin_trap ();
6411 return const0_rtx;
6412
6413 case BUILT_IN_UNREACHABLE:
6414 expand_builtin_unreachable ();
6415 return const0_rtx;
6416
6417 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6418 case BUILT_IN_SIGNBITD32:
6419 case BUILT_IN_SIGNBITD64:
6420 case BUILT_IN_SIGNBITD128:
6421 target = expand_builtin_signbit (exp, target);
6422 if (target)
6423 return target;
6424 break;
6425
6426 /* Various hooks for the DWARF 2 __throw routine. */
6427 case BUILT_IN_UNWIND_INIT:
6428 expand_builtin_unwind_init ();
6429 return const0_rtx;
6430 case BUILT_IN_DWARF_CFA:
6431 return virtual_cfa_rtx;
6432 #ifdef DWARF2_UNWIND_INFO
6433 case BUILT_IN_DWARF_SP_COLUMN:
6434 return expand_builtin_dwarf_sp_column ();
6435 case BUILT_IN_INIT_DWARF_REG_SIZES:
6436 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6437 return const0_rtx;
6438 #endif
6439 case BUILT_IN_FROB_RETURN_ADDR:
6440 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6441 case BUILT_IN_EXTRACT_RETURN_ADDR:
6442 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6443 case BUILT_IN_EH_RETURN:
6444 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6445 CALL_EXPR_ARG (exp, 1));
6446 return const0_rtx;
6447 #ifdef EH_RETURN_DATA_REGNO
6448 case BUILT_IN_EH_RETURN_DATA_REGNO:
6449 return expand_builtin_eh_return_data_regno (exp);
6450 #endif
6451 case BUILT_IN_EXTEND_POINTER:
6452 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6453 case BUILT_IN_EH_POINTER:
6454 return expand_builtin_eh_pointer (exp);
6455 case BUILT_IN_EH_FILTER:
6456 return expand_builtin_eh_filter (exp);
6457 case BUILT_IN_EH_COPY_VALUES:
6458 return expand_builtin_eh_copy_values (exp);
6459
6460 case BUILT_IN_VA_START:
6461 return expand_builtin_va_start (exp);
6462 case BUILT_IN_VA_END:
6463 return expand_builtin_va_end (exp);
6464 case BUILT_IN_VA_COPY:
6465 return expand_builtin_va_copy (exp);
6466 case BUILT_IN_EXPECT:
6467 return expand_builtin_expect (exp, target);
6468 case BUILT_IN_ASSUME_ALIGNED:
6469 return expand_builtin_assume_aligned (exp, target);
6470 case BUILT_IN_PREFETCH:
6471 expand_builtin_prefetch (exp);
6472 return const0_rtx;
6473
6474 case BUILT_IN_INIT_TRAMPOLINE:
6475 return expand_builtin_init_trampoline (exp, true);
6476 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6477 return expand_builtin_init_trampoline (exp, false);
6478 case BUILT_IN_ADJUST_TRAMPOLINE:
6479 return expand_builtin_adjust_trampoline (exp);
6480
6481 case BUILT_IN_FORK:
6482 case BUILT_IN_EXECL:
6483 case BUILT_IN_EXECV:
6484 case BUILT_IN_EXECLP:
6485 case BUILT_IN_EXECLE:
6486 case BUILT_IN_EXECVP:
6487 case BUILT_IN_EXECVE:
6488 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6489 if (target)
6490 return target;
6491 break;
6492
6493 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6494 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6495 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6496 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6497 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6499 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6505 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6506 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6507 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6508 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6510 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6516 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6517 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6518 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6519 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6521 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6527 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6528 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6529 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6530 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6532 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6538 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6539 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6540 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6541 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6543 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6549 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6550 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6551 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6552 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6554 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6560 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6561 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6562 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6563 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6571 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6572 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6573 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6574 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6582 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6583 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6584 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6585 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6593 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6594 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6595 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6596 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6604 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6605 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6606 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6607 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6609 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6610 if (target)
6611 return target;
6612 break;
6613
6614 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6615 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6616 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6617 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6618 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6620 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6621 if (target)
6622 return target;
6623 break;
6624
6625 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6626 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6627 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6628 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6629 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6630 if (mode == VOIDmode)
6631 mode = TYPE_MODE (boolean_type_node);
6632 if (!target || !register_operand (target, mode))
6633 target = gen_reg_rtx (mode);
6634
6635 mode = get_builtin_sync_mode
6636 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6637 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6638 if (target)
6639 return target;
6640 break;
6641
6642 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6643 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6644 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6645 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6646 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6647 mode = get_builtin_sync_mode
6648 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6649 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6650 if (target)
6651 return target;
6652 break;
6653
6654 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6655 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6656 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6657 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6658 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6660 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6661 if (target)
6662 return target;
6663 break;
6664
6665 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6666 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6667 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6668 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6669 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6671 expand_builtin_sync_lock_release (mode, exp);
6672 return const0_rtx;
6673
6674 case BUILT_IN_SYNC_SYNCHRONIZE:
6675 expand_builtin_sync_synchronize ();
6676 return const0_rtx;
6677
6678 case BUILT_IN_ATOMIC_EXCHANGE_1:
6679 case BUILT_IN_ATOMIC_EXCHANGE_2:
6680 case BUILT_IN_ATOMIC_EXCHANGE_4:
6681 case BUILT_IN_ATOMIC_EXCHANGE_8:
6682 case BUILT_IN_ATOMIC_EXCHANGE_16:
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6684 target = expand_builtin_atomic_exchange (mode, exp, target);
6685 if (target)
6686 return target;
6687 break;
6688
6689 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6690 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6691 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6692 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6693 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6694 {
6695 unsigned int nargs, z;
6696 vec<tree, va_gc> *vec;
6697
6698 mode =
6699 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6700 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6701 if (target)
6702 return target;
6703
6704 /* If this is turned into an external library call, the weak parameter
6705 must be dropped to match the expected parameter list. */
6706 nargs = call_expr_nargs (exp);
6707 vec_alloc (vec, nargs - 1);
6708 for (z = 0; z < 3; z++)
6709 vec->quick_push (CALL_EXPR_ARG (exp, z));
6710 /* Skip the boolean weak parameter. */
6711 for (z = 4; z < 6; z++)
6712 vec->quick_push (CALL_EXPR_ARG (exp, z));
6713 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6714 break;
6715 }
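    /* Editorial note: the external fallback in libatomic is assumed to
       take no weak parameter, e.g. for the 4-byte case something like
	 bool __atomic_compare_exchange_4 (void *mem, void *expect,
					   unsigned desired,
					   int success, int failure);
       which is why the rebuilt argument list above skips argument 3.  */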
6716
6717 case BUILT_IN_ATOMIC_LOAD_1:
6718 case BUILT_IN_ATOMIC_LOAD_2:
6719 case BUILT_IN_ATOMIC_LOAD_4:
6720 case BUILT_IN_ATOMIC_LOAD_8:
6721 case BUILT_IN_ATOMIC_LOAD_16:
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6723 target = expand_builtin_atomic_load (mode, exp, target);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_ATOMIC_STORE_1:
6729 case BUILT_IN_ATOMIC_STORE_2:
6730 case BUILT_IN_ATOMIC_STORE_4:
6731 case BUILT_IN_ATOMIC_STORE_8:
6732 case BUILT_IN_ATOMIC_STORE_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6734 target = expand_builtin_atomic_store (mode, exp);
6735 if (target)
6736 return const0_rtx;
6737 break;
6738
6739 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6740 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6741 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6742 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6743 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6744 {
6745 enum built_in_function lib;
6746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6747 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6748 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6749 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6750 ignore, lib);
6751 if (target)
6752 return target;
6753 break;
6754 }
6755 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6756 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6757 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6758 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6759 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6760 {
6761 enum built_in_function lib;
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6763 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6764 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6765 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6766 ignore, lib);
6767 if (target)
6768 return target;
6769 break;
6770 }
6771 case BUILT_IN_ATOMIC_AND_FETCH_1:
6772 case BUILT_IN_ATOMIC_AND_FETCH_2:
6773 case BUILT_IN_ATOMIC_AND_FETCH_4:
6774 case BUILT_IN_ATOMIC_AND_FETCH_8:
6775 case BUILT_IN_ATOMIC_AND_FETCH_16:
6776 {
6777 enum built_in_function lib;
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6779 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6780 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6781 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6782 ignore, lib);
6783 if (target)
6784 return target;
6785 break;
6786 }
6787 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6788 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6789 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6790 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6791 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6792 {
6793 enum built_in_function lib;
6794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6795 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6796 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6798 ignore, lib);
6799 if (target)
6800 return target;
6801 break;
6802 }
6803 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6804 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6805 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6806 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6807 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6808 {
6809 enum built_in_function lib;
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6811 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6812 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6813 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6814 ignore, lib);
6815 if (target)
6816 return target;
6817 break;
6818 }
6819 case BUILT_IN_ATOMIC_OR_FETCH_1:
6820 case BUILT_IN_ATOMIC_OR_FETCH_2:
6821 case BUILT_IN_ATOMIC_OR_FETCH_4:
6822 case BUILT_IN_ATOMIC_OR_FETCH_8:
6823 case BUILT_IN_ATOMIC_OR_FETCH_16:
6824 {
6825 enum built_in_function lib;
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6827 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6828 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6829 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6830 ignore, lib);
6831 if (target)
6832 return target;
6833 break;
6834 }
6835 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6836 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6837 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6838 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6839 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6841 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6842 ignore, BUILT_IN_NONE);
6843 if (target)
6844 return target;
6845 break;
6846
6847 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6848 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6849 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6850 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6851 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6852 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6853 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6854 ignore, BUILT_IN_NONE);
6855 if (target)
6856 return target;
6857 break;
6858
6859 case BUILT_IN_ATOMIC_FETCH_AND_1:
6860 case BUILT_IN_ATOMIC_FETCH_AND_2:
6861 case BUILT_IN_ATOMIC_FETCH_AND_4:
6862 case BUILT_IN_ATOMIC_FETCH_AND_8:
6863 case BUILT_IN_ATOMIC_FETCH_AND_16:
6864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6865 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6866 ignore, BUILT_IN_NONE);
6867 if (target)
6868 return target;
6869 break;
6870
6871 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6872 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6873 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6874 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6875 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6876 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6877 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6878 ignore, BUILT_IN_NONE);
6879 if (target)
6880 return target;
6881 break;
6882
6883 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6884 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6885 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6886 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6887 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6888 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6889 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6890 ignore, BUILT_IN_NONE);
6891 if (target)
6892 return target;
6893 break;
6894
6895 case BUILT_IN_ATOMIC_FETCH_OR_1:
6896 case BUILT_IN_ATOMIC_FETCH_OR_2:
6897 case BUILT_IN_ATOMIC_FETCH_OR_4:
6898 case BUILT_IN_ATOMIC_FETCH_OR_8:
6899 case BUILT_IN_ATOMIC_FETCH_OR_16:
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6901 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6902 ignore, BUILT_IN_NONE);
6903 if (target)
6904 return target;
6905 break;
6906
6907 case BUILT_IN_ATOMIC_TEST_AND_SET:
6908 return expand_builtin_atomic_test_and_set (exp, target);
6909
6910 case BUILT_IN_ATOMIC_CLEAR:
6911 return expand_builtin_atomic_clear (exp);
6912
6913 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6914 return expand_builtin_atomic_always_lock_free (exp);
6915
6916 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6917 target = expand_builtin_atomic_is_lock_free (exp);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_ATOMIC_THREAD_FENCE:
6923 expand_builtin_atomic_thread_fence (exp);
6924 return const0_rtx;
6925
6926 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6927 expand_builtin_atomic_signal_fence (exp);
6928 return const0_rtx;
6929
6930 case BUILT_IN_OBJECT_SIZE:
6931 return expand_builtin_object_size (exp);
6932
6933 case BUILT_IN_MEMCPY_CHK:
6934 case BUILT_IN_MEMPCPY_CHK:
6935 case BUILT_IN_MEMMOVE_CHK:
6936 case BUILT_IN_MEMSET_CHK:
6937 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6938 if (target)
6939 return target;
6940 break;
6941
6942 case BUILT_IN_STRCPY_CHK:
6943 case BUILT_IN_STPCPY_CHK:
6944 case BUILT_IN_STRNCPY_CHK:
6945 case BUILT_IN_STPNCPY_CHK:
6946 case BUILT_IN_STRCAT_CHK:
6947 case BUILT_IN_STRNCAT_CHK:
6948 case BUILT_IN_SNPRINTF_CHK:
6949 case BUILT_IN_VSNPRINTF_CHK:
6950 maybe_emit_chk_warning (exp, fcode);
6951 break;
6952
6953 case BUILT_IN_SPRINTF_CHK:
6954 case BUILT_IN_VSPRINTF_CHK:
6955 maybe_emit_sprintf_chk_warning (exp, fcode);
6956 break;
6957
6958 case BUILT_IN_FREE:
6959 if (warn_free_nonheap_object)
6960 maybe_emit_free_warning (exp);
6961 break;
6962
6963 case BUILT_IN_THREAD_POINTER:
6964 return expand_builtin_thread_pointer (exp, target);
6965
6966 case BUILT_IN_SET_THREAD_POINTER:
6967 expand_builtin_set_thread_pointer (exp);
6968 return const0_rtx;
6969
6970 case BUILT_IN_CILK_DETACH:
6971 expand_builtin_cilk_detach (exp);
6972 return const0_rtx;
6973
6974 case BUILT_IN_CILK_POP_FRAME:
6975 expand_builtin_cilk_pop_frame (exp);
6976 return const0_rtx;
6977
6978 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6979 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6980 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6981 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6982 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6983 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6984 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6985 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6986 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6987 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6988 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6989 /* We allow user CHKP builtins to be called even when the Pointer
6990 Bounds Checker is off. */
6991 if (!chkp_function_instrumented_p (current_function_decl))
6992 {
6993 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6994 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6995 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6996 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6997 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6998 return expand_normal (CALL_EXPR_ARG (exp, 0));
6999 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7000 return expand_normal (size_zero_node);
7001 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7002 return expand_normal (size_int (-1));
7003 else
7004 return const0_rtx;
7005 }
7006 /* FALLTHROUGH */
7007
7008 case BUILT_IN_CHKP_BNDMK:
7009 case BUILT_IN_CHKP_BNDSTX:
7010 case BUILT_IN_CHKP_BNDCL:
7011 case BUILT_IN_CHKP_BNDCU:
7012 case BUILT_IN_CHKP_BNDLDX:
7013 case BUILT_IN_CHKP_BNDRET:
7014 case BUILT_IN_CHKP_INTERSECT:
7015 case BUILT_IN_CHKP_NARROW:
7016 case BUILT_IN_CHKP_EXTRACT_LOWER:
7017 case BUILT_IN_CHKP_EXTRACT_UPPER:
7018 /* A software implementation of the Pointer Bounds Checker is not yet
7019 implemented; target support is required. */
7020 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7021 break;
7022
7023 default: /* Just do a normal library call if we don't recognize the builtin. */
7024 break;
7025 }
7026
7027 /* The switch statement above can drop through to cause the function
7028 to be called normally. */
7029 return expand_call (exp, target, ignore);
7030 }
7031
7032 /* Similar to expand_builtin but is used for instrumented calls. */
7033
7034 rtx
7035 expand_builtin_with_bounds (tree exp, rtx target,
7036 rtx subtarget ATTRIBUTE_UNUSED,
7037 machine_mode mode, int ignore)
7038 {
7039 tree fndecl = get_callee_fndecl (exp);
7040 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7041
7042 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7043
7044 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7045 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7046
7047 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7048 && fcode < END_CHKP_BUILTINS);
7049
7050 switch (fcode)
7051 {
7052 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7053 target = expand_builtin_memcpy_with_bounds (exp, target);
7054 if (target)
7055 return target;
7056 break;
7057
7058 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7059 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7060 if (target)
7061 return target;
7062 break;
7063
7064 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7065 target = expand_builtin_memset_with_bounds (exp, target, mode);
7066 if (target)
7067 return target;
7068 break;
7069
7070 default:
7071 break;
7072 }
7073
7074 /* The switch statement above can drop through to cause the function
7075 to be called normally. */
7076 return expand_call (exp, target, ignore);
7077 }
7078
7079 /* Determine whether a tree node represents a call to a built-in
7080 function. If the tree T is a call to a built-in function with
7081 the right number of arguments of the appropriate types, return
7082 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7083 Otherwise the return value is END_BUILTINS. */
7084
7085 enum built_in_function
7086 builtin_mathfn_code (const_tree t)
7087 {
7088 const_tree fndecl, arg, parmlist;
7089 const_tree argtype, parmtype;
7090 const_call_expr_arg_iterator iter;
7091
7092 if (TREE_CODE (t) != CALL_EXPR
7093 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7094 return END_BUILTINS;
7095
7096 fndecl = get_callee_fndecl (t);
7097 if (fndecl == NULL_TREE
7098 || TREE_CODE (fndecl) != FUNCTION_DECL
7099 || ! DECL_BUILT_IN (fndecl)
7100 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7101 return END_BUILTINS;
7102
7103 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7104 init_const_call_expr_arg_iterator (t, &iter);
7105 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7106 {
7107 /* If a function doesn't take a variable number of arguments,
7108 the last element in the list will have type `void'. */
7109 parmtype = TREE_VALUE (parmlist);
7110 if (VOID_TYPE_P (parmtype))
7111 {
7112 if (more_const_call_expr_args_p (&iter))
7113 return END_BUILTINS;
7114 return DECL_FUNCTION_CODE (fndecl);
7115 }
7116
7117 if (! more_const_call_expr_args_p (&iter))
7118 return END_BUILTINS;
7119
7120 arg = next_const_call_expr_arg (&iter);
7121 argtype = TREE_TYPE (arg);
7122
7123 if (SCALAR_FLOAT_TYPE_P (parmtype))
7124 {
7125 if (! SCALAR_FLOAT_TYPE_P (argtype))
7126 return END_BUILTINS;
7127 }
7128 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7129 {
7130 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7131 return END_BUILTINS;
7132 }
7133 else if (POINTER_TYPE_P (parmtype))
7134 {
7135 if (! POINTER_TYPE_P (argtype))
7136 return END_BUILTINS;
7137 }
7138 else if (INTEGRAL_TYPE_P (parmtype))
7139 {
7140 if (! INTEGRAL_TYPE_P (argtype))
7141 return END_BUILTINS;
7142 }
7143 else
7144 return END_BUILTINS;
7145 }
7146
7147 /* Variable-length argument list. */
7148 return DECL_FUNCTION_CODE (fndecl);
7149 }
7150
7151 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7152 evaluate to a constant. */
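/* For instance (illustrative):
     __builtin_constant_p (3)    -> 1
     __builtin_constant_p ("x")  -> 1 (via the ADDR_EXPR case below)
   while an argument with side effects always folds to 0.  */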
7153
7154 static tree
7155 fold_builtin_constant_p (tree arg)
7156 {
7157 /* We return 1 for a numeric type that's known to be a constant
7158 value at compile-time or for an aggregate type that's a
7159 literal constant. */
7160 STRIP_NOPS (arg);
7161
7162 /* If we know this is a constant, return the integer constant one. */
7163 if (CONSTANT_CLASS_P (arg)
7164 || (TREE_CODE (arg) == CONSTRUCTOR
7165 && TREE_CONSTANT (arg)))
7166 return integer_one_node;
7167 if (TREE_CODE (arg) == ADDR_EXPR)
7168 {
7169 tree op = TREE_OPERAND (arg, 0);
7170 if (TREE_CODE (op) == STRING_CST
7171 || (TREE_CODE (op) == ARRAY_REF
7172 && integer_zerop (TREE_OPERAND (op, 1))
7173 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7174 return integer_one_node;
7175 }
7176
7177 /* If this expression has side effects, show we don't know it to be a
7178 constant. Likewise if it's a pointer or aggregate type, since in
7179 those cases we only want literals; those are only optimized
7180 when generating RTL, not later.
7181 And finally, if we are compiling an initializer, not code, we
7182 need to return a definite result now; there's not going to be any
7183 more optimization done. */
7184 if (TREE_SIDE_EFFECTS (arg)
7185 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7186 || POINTER_TYPE_P (TREE_TYPE (arg))
7187 || cfun == 0
7188 || folding_initializer
7189 || force_folding_builtin_constant_p)
7190 return integer_zero_node;
7191
7192 return NULL_TREE;
7193 }
7194
7195 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7196 return it as a truthvalue. */
7197
7198 static tree
7199 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7200 tree predictor)
7201 {
7202 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7203
7204 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7205 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7206 ret_type = TREE_TYPE (TREE_TYPE (fn));
7207 pred_type = TREE_VALUE (arg_types);
7208 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7209
7210 pred = fold_convert_loc (loc, pred_type, pred);
7211 expected = fold_convert_loc (loc, expected_type, expected);
7212 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7213 predictor);
7214
7215 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7216 build_int_cst (ret_type, 0));
7217 }
7218
7219 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7220 NULL_TREE if no simplification is possible. */
7221
7222 tree
7223 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7224 {
7225 tree inner, fndecl, inner_arg0;
7226 enum tree_code code;
7227
7228 /* Distribute the expected value over short-circuiting operators.
7229 See through the cast from truthvalue_type_node to long. */
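/* For example (a sketch): with ARG0 = (a && b) and ARG1 = 1,
     __builtin_expect (a && b, 1)
   is rewritten below as
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   so the prediction reaches both short-circuited operands.  */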
7230 inner_arg0 = arg0;
7231 while (CONVERT_EXPR_P (inner_arg0)
7232 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7233 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7234 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7235
7236 /* If this is a builtin_expect within a builtin_expect keep the
7237 inner one. See through a comparison against a constant. It
7238 might have been added to create a truthvalue. */
7239 inner = inner_arg0;
7240
7241 if (COMPARISON_CLASS_P (inner)
7242 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7243 inner = TREE_OPERAND (inner, 0);
7244
7245 if (TREE_CODE (inner) == CALL_EXPR
7246 && (fndecl = get_callee_fndecl (inner))
7247 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7248 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7249 return arg0;
7250
7251 inner = inner_arg0;
7252 code = TREE_CODE (inner);
7253 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7254 {
7255 tree op0 = TREE_OPERAND (inner, 0);
7256 tree op1 = TREE_OPERAND (inner, 1);
7257
7258 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7259 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7260 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7261
7262 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7263 }
7264
7265 /* If the argument isn't invariant then there's nothing else we can do. */
7266 if (!TREE_CONSTANT (inner_arg0))
7267 return NULL_TREE;
7268
7269 /* If we expect that a comparison against the argument will fold to
7270 a constant, return the constant. In practice, this means a true
7271 constant or the address of a non-weak symbol. */
7272 inner = inner_arg0;
7273 STRIP_NOPS (inner);
7274 if (TREE_CODE (inner) == ADDR_EXPR)
7275 {
7276 do
7277 {
7278 inner = TREE_OPERAND (inner, 0);
7279 }
7280 while (TREE_CODE (inner) == COMPONENT_REF
7281 || TREE_CODE (inner) == ARRAY_REF);
7282 if ((TREE_CODE (inner) == VAR_DECL
7283 || TREE_CODE (inner) == FUNCTION_DECL)
7284 && DECL_WEAK (inner))
7285 return NULL_TREE;
7286 }
7287
7288 /* Otherwise, ARG0 already has the proper type for the return value. */
7289 return arg0;
7290 }
7291
7292 /* Fold a call to __builtin_classify_type with argument ARG. */
7293
7294 static tree
7295 fold_builtin_classify_type (tree arg)
7296 {
7297 if (arg == 0)
7298 return build_int_cst (integer_type_node, no_type_class);
7299
7300 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7301 }
7302
7303 /* Fold a call to __builtin_strlen with argument ARG. */
7304
7305 static tree
7306 fold_builtin_strlen (location_t loc, tree type, tree arg)
7307 {
7308 if (!validate_arg (arg, POINTER_TYPE))
7309 return NULL_TREE;
7310 else
7311 {
7312 tree len = c_strlen (arg, 0);
7313
7314 if (len)
7315 return fold_convert_loc (loc, type, len);
7316
7317 return NULL_TREE;
7318 }
7319 }
7320
7321 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7322
7323 static tree
7324 fold_builtin_inf (location_t loc, tree type, int warn)
7325 {
7326 REAL_VALUE_TYPE real;
7327
7328 /* __builtin_inff is intended to be usable to define INFINITY on all
7329 targets. If an infinity is not available, INFINITY expands "to a
7330 positive constant of type float that overflows at translation
7331 time", footnote "In this case, using INFINITY will violate the
7332 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7333 Thus we pedwarn to ensure this constraint violation is
7334 diagnosed. */
7335 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7336 pedwarn (loc, 0, "target format does not support infinity");
7337
7338 real_inf (&real);
7339 return build_real (type, real);
7340 }
7341
7342 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7343
7344 static tree
7345 fold_builtin_nan (tree arg, tree type, int quiet)
7346 {
7347 REAL_VALUE_TYPE real;
7348 const char *str;
7349
7350 if (!validate_arg (arg, POINTER_TYPE))
7351 return NULL_TREE;
7352 str = c_getstr (arg);
7353 if (!str)
7354 return NULL_TREE;
7355
7356 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7357 return NULL_TREE;
7358
7359 return build_real (type, real);
7360 }
7361
7362 /* Return true if the floating point expression T has an integer value.
7363 We also allow +Inf, -Inf and NaN to be considered integer values. */
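/* For instance, (double) i for integral i, floor (x), and
   fmin (floor (a), trunc (b)) are all recognized as integer valued.  */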
7364
7365 static bool
7366 integer_valued_real_p (tree t)
7367 {
7368 switch (TREE_CODE (t))
7369 {
7370 case FLOAT_EXPR:
7371 return true;
7372
7373 case ABS_EXPR:
7374 case SAVE_EXPR:
7375 return integer_valued_real_p (TREE_OPERAND (t, 0));
7376
7377 case COMPOUND_EXPR:
7378 case MODIFY_EXPR:
7379 case BIND_EXPR:
7380 return integer_valued_real_p (TREE_OPERAND (t, 1));
7381
7382 case PLUS_EXPR:
7383 case MINUS_EXPR:
7384 case MULT_EXPR:
7385 case MIN_EXPR:
7386 case MAX_EXPR:
7387 return integer_valued_real_p (TREE_OPERAND (t, 0))
7388 && integer_valued_real_p (TREE_OPERAND (t, 1));
7389
7390 case COND_EXPR:
7391 return integer_valued_real_p (TREE_OPERAND (t, 1))
7392 && integer_valued_real_p (TREE_OPERAND (t, 2));
7393
7394 case REAL_CST:
7395 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7396
7397 CASE_CONVERT:
7398 {
7399 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7400 if (TREE_CODE (type) == INTEGER_TYPE)
7401 return true;
7402 if (TREE_CODE (type) == REAL_TYPE)
7403 return integer_valued_real_p (TREE_OPERAND (t, 0));
7404 break;
7405 }
7406
7407 case CALL_EXPR:
7408 switch (builtin_mathfn_code (t))
7409 {
7410 CASE_FLT_FN (BUILT_IN_CEIL):
7411 CASE_FLT_FN (BUILT_IN_FLOOR):
7412 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7413 CASE_FLT_FN (BUILT_IN_RINT):
7414 CASE_FLT_FN (BUILT_IN_ROUND):
7415 CASE_FLT_FN (BUILT_IN_TRUNC):
7416 return true;
7417
7418 CASE_FLT_FN (BUILT_IN_FMIN):
7419 CASE_FLT_FN (BUILT_IN_FMAX):
7420 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7421 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7422
7423 default:
7424 break;
7425 }
7426 break;
7427
7428 default:
7429 break;
7430 }
7431 return false;
7432 }
7433
7434 /* FNDECL is assumed to be a builtin where truncation can be propagated
7435 across (for instance floor((double)f) == (double)floorf (f)).
7436 Do the transformation for a call with argument ARG. */
7437
7438 static tree
7439 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7440 {
7441 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7442
7443 if (!validate_arg (arg, REAL_TYPE))
7444 return NULL_TREE;
7445
7446 /* Integer rounding functions are idempotent. */
7447 if (fcode == builtin_mathfn_code (arg))
7448 return arg;
7449
7450 /* If argument is already integer valued, and we don't need to worry
7451 about setting errno, there's no need to perform rounding. */
7452 if (! flag_errno_math && integer_valued_real_p (arg))
7453 return arg;
7454
7455 if (optimize)
7456 {
7457 tree arg0 = strip_float_extensions (arg);
7458 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7459 tree newtype = TREE_TYPE (arg0);
7460 tree decl;
7461
7462 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7463 && (decl = mathfn_built_in (newtype, fcode)))
7464 return fold_convert_loc (loc, ftype,
7465 build_call_expr_loc (loc, decl, 1,
7466 fold_convert_loc (loc,
7467 newtype,
7468 arg0)));
7469 }
7470 return NULL_TREE;
7471 }
7472
7473 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7474 the argument, for instance lround((double)f) -> lroundf (f).
7475 Do the transformation for a call with argument ARG. */
7476
7477 static tree
7478 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7479 {
7480 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7481
7482 if (!validate_arg (arg, REAL_TYPE))
7483 return NULL_TREE;
7484
7485 /* If argument is already integer valued, and we don't need to worry
7486 about setting errno, there's no need to perform rounding. */
7487 if (! flag_errno_math && integer_valued_real_p (arg))
7488 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7489 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7490
7491 if (optimize)
7492 {
7493 tree ftype = TREE_TYPE (arg);
7494 tree arg0 = strip_float_extensions (arg);
7495 tree newtype = TREE_TYPE (arg0);
7496 tree decl;
7497
7498 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7499 && (decl = mathfn_built_in (newtype, fcode)))
7500 return build_call_expr_loc (loc, decl, 1,
7501 fold_convert_loc (loc, newtype, arg0));
7502 }
7503
7504 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7505 sizeof (int) == sizeof (long). */
7506 if (TYPE_PRECISION (integer_type_node)
7507 == TYPE_PRECISION (long_integer_type_node))
7508 {
7509 tree newfn = NULL_TREE;
7510 switch (fcode)
7511 {
7512 CASE_FLT_FN (BUILT_IN_ICEIL):
7513 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7514 break;
7515
7516 CASE_FLT_FN (BUILT_IN_IFLOOR):
7517 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7518 break;
7519
7520 CASE_FLT_FN (BUILT_IN_IROUND):
7521 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7522 break;
7523
7524 CASE_FLT_FN (BUILT_IN_IRINT):
7525 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7526 break;
7527
7528 default:
7529 break;
7530 }
7531
7532 if (newfn)
7533 {
7534 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7535 return fold_convert_loc (loc,
7536 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7537 }
7538 }
7539
7540 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7541 sizeof (long long) == sizeof (long). */
7542 if (TYPE_PRECISION (long_long_integer_type_node)
7543 == TYPE_PRECISION (long_integer_type_node))
7544 {
7545 tree newfn = NULL_TREE;
7546 switch (fcode)
7547 {
7548 CASE_FLT_FN (BUILT_IN_LLCEIL):
7549 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7550 break;
7551
7552 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7553 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7554 break;
7555
7556 CASE_FLT_FN (BUILT_IN_LLROUND):
7557 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7558 break;
7559
7560 CASE_FLT_FN (BUILT_IN_LLRINT):
7561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7562 break;
7563
7564 default:
7565 break;
7566 }
7567
7568 if (newfn)
7569 {
7570 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7571 return fold_convert_loc (loc,
7572 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7573 }
7574 }
7575
7576 return NULL_TREE;
7577 }
7578
7579 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7580 return type. Return NULL_TREE if no simplification can be made. */
7581
7582 static tree
7583 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7584 {
7585 tree res;
7586
7587 if (!validate_arg (arg, COMPLEX_TYPE)
7588 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7589 return NULL_TREE;
7590
7591 /* Calculate the result when the argument is a constant. */
7592 if (TREE_CODE (arg) == COMPLEX_CST
7593 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7594 type, mpfr_hypot)))
7595 return res;
7596
7597 if (TREE_CODE (arg) == COMPLEX_EXPR)
7598 {
7599 tree real = TREE_OPERAND (arg, 0);
7600 tree imag = TREE_OPERAND (arg, 1);
7601
7602 /* If either part is zero, cabs is fabs of the other. */
7603 if (real_zerop (real))
7604 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7605 if (real_zerop (imag))
7606 return fold_build1_loc (loc, ABS_EXPR, type, real);
7607
7608 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7609 if (flag_unsafe_math_optimizations
7610 && operand_equal_p (real, imag, OEP_PURE_SAME))
7611 {
7612 const REAL_VALUE_TYPE sqrt2_trunc
7613 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7614 STRIP_NOPS (real);
7615 return fold_build2_loc (loc, MULT_EXPR, type,
7616 fold_build1_loc (loc, ABS_EXPR, type, real),
7617 build_real (type, sqrt2_trunc));
7618 }
7619 }
7620
7621 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7622 if (TREE_CODE (arg) == NEGATE_EXPR
7623 || TREE_CODE (arg) == CONJ_EXPR)
7624 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7625
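  /* Under -funsafe-math-optimizations, cabs (z) can instead be expanded
     as (a sketch of the tree built below):
       sqrt (__real__ z * __real__ z + __imag__ z * __imag__ z)  */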
7626 /* Don't do this when optimizing for size. */
7627 if (flag_unsafe_math_optimizations
7628 && optimize && optimize_function_for_speed_p (cfun))
7629 {
7630 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7631
7632 if (sqrtfn != NULL_TREE)
7633 {
7634 tree rpart, ipart, result;
7635
7636 arg = builtin_save_expr (arg);
7637
7638 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7639 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7640
7641 rpart = builtin_save_expr (rpart);
7642 ipart = builtin_save_expr (ipart);
7643
7644 result = fold_build2_loc (loc, PLUS_EXPR, type,
7645 fold_build2_loc (loc, MULT_EXPR, type,
7646 rpart, rpart),
7647 fold_build2_loc (loc, MULT_EXPR, type,
7648 ipart, ipart));
7649
7650 return build_call_expr_loc (loc, sqrtfn, 1, result);
7651 }
7652 }
7653
7654 return NULL_TREE;
7655 }
7656
7657 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7658 complex tree type of the result. If NEG is true, the imaginary
7659 zero is negative. */
7660
7661 static tree
7662 build_complex_cproj (tree type, bool neg)
7663 {
7664 REAL_VALUE_TYPE rinf, rzero = dconst0;
7665
7666 real_inf (&rinf);
7667 rzero.sign = neg;
7668 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7669 build_real (TREE_TYPE (type), rzero));
7670 }
7671
7672 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7673 return type. Return NULL_TREE if no simplification can be made. */
7674
7675 static tree
7676 fold_builtin_cproj (location_t loc, tree arg, tree type)
7677 {
7678 if (!validate_arg (arg, COMPLEX_TYPE)
7679 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7680 return NULL_TREE;
7681
7682 /* If there are no infinities, return arg. */
7683 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7684 return non_lvalue_loc (loc, arg);
7685
7686 /* Calculate the result when the argument is a constant. */
7687 if (TREE_CODE (arg) == COMPLEX_CST)
7688 {
7689 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7690 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7691
7692 if (real_isinf (real) || real_isinf (imag))
7693 return build_complex_cproj (type, imag->sign);
7694 else
7695 return arg;
7696 }
7697 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7698 {
7699 tree real = TREE_OPERAND (arg, 0);
7700 tree imag = TREE_OPERAND (arg, 1);
7701
7702 STRIP_NOPS (real);
7703 STRIP_NOPS (imag);
7704
7705 /* If the real part is inf and the imag part is known to be
7706 nonnegative, return (inf + 0i). Remember side-effects are
7707 possible in the imag part. */
7708 if (TREE_CODE (real) == REAL_CST
7709 && real_isinf (TREE_REAL_CST_PTR (real))
7710 && tree_expr_nonnegative_p (imag))
7711 return omit_one_operand_loc (loc, type,
7712 build_complex_cproj (type, false),
7713 arg);
7714
7715 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7716 Remember side-effects are possible in the real part. */
7717 if (TREE_CODE (imag) == REAL_CST
7718 && real_isinf (TREE_REAL_CST_PTR (imag)))
7719 return
7720 omit_one_operand_loc (loc, type,
7721 build_complex_cproj (type, TREE_REAL_CST_PTR
7722 (imag)->sign), arg);
7723 }
7724
7725 return NULL_TREE;
7726 }
7727
7728 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7729 Return NULL_TREE if no simplification can be made. */
7730
7731 static tree
7732 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7733 {
7735 enum built_in_function fcode;
7736 tree res;
7737
7738 if (!validate_arg (arg, REAL_TYPE))
7739 return NULL_TREE;
7740
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7743 return res;
7744
7745 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7746 fcode = builtin_mathfn_code (arg);
7747 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7748 {
7749 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7750 arg = fold_build2_loc (loc, MULT_EXPR, type,
7751 CALL_EXPR_ARG (arg, 0),
7752 build_real (type, dconsthalf));
7753 return build_call_expr_loc (loc, expfn, 1, arg);
7754 }
7755
7756 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7757 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7758 {
7759 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7760
7761 if (powfn)
7762 {
7763 tree arg0 = CALL_EXPR_ARG (arg, 0);
7764 tree tree_root;
7765 /* The inner root was either sqrt or cbrt. */
7766 /* This was a conditional expression but it triggered a bug
7767 in Sun C 5.5. */
7768 REAL_VALUE_TYPE dconstroot;
7769 if (BUILTIN_SQRT_P (fcode))
7770 dconstroot = dconsthalf;
7771 else
7772 dconstroot = dconst_third ();
7773
7774 /* Adjust for the outer root. */
7775 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7776 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7777 tree_root = build_real (type, dconstroot);
7778 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7779 }
7780 }
7781
7782 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7783 if (flag_unsafe_math_optimizations
7784 && (fcode == BUILT_IN_POW
7785 || fcode == BUILT_IN_POWF
7786 || fcode == BUILT_IN_POWL))
7787 {
7788 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7789 tree arg0 = CALL_EXPR_ARG (arg, 0);
7790 tree arg1 = CALL_EXPR_ARG (arg, 1);
7791 tree narg1;
7792 if (!tree_expr_nonnegative_p (arg0))
7793 arg0 = build1 (ABS_EXPR, type, arg0);
7794 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7795 build_real (type, dconsthalf));
7796 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7797 }
7798
7799 return NULL_TREE;
7800 }
7801
7802 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7803 Return NULL_TREE if no simplification can be made. */
7804
7805 static tree
7806 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7807 {
7808 const enum built_in_function fcode = builtin_mathfn_code (arg);
7809 tree res;
7810
7811 if (!validate_arg (arg, REAL_TYPE))
7812 return NULL_TREE;
7813
7814 /* Calculate the result when the argument is a constant. */
7815 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7816 return res;
7817
7818 if (flag_unsafe_math_optimizations)
7819 {
7820 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7821 if (BUILTIN_EXPONENT_P (fcode))
7822 {
7823 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7824 const REAL_VALUE_TYPE third_trunc =
7825 real_value_truncate (TYPE_MODE (type), dconst_third ());
7826 arg = fold_build2_loc (loc, MULT_EXPR, type,
7827 CALL_EXPR_ARG (arg, 0),
7828 build_real (type, third_trunc));
7829 return build_call_expr_loc (loc, expfn, 1, arg);
7830 }
7831
7832 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7833 if (BUILTIN_SQRT_P (fcode))
7834 {
7835 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7836
7837 if (powfn)
7838 {
7839 tree arg0 = CALL_EXPR_ARG (arg, 0);
7840 tree tree_root;
7841 REAL_VALUE_TYPE dconstroot = dconst_third ();
7842
7843 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7844 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7845 tree_root = build_real (type, dconstroot);
7846 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7847 }
7848 }
7849
7850 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7851 if (BUILTIN_CBRT_P (fcode))
7852 {
7853 tree arg0 = CALL_EXPR_ARG (arg, 0);
7854 if (tree_expr_nonnegative_p (arg0))
7855 {
7856 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7857
7858 if (powfn)
7859 {
7860 tree tree_root;
7861 REAL_VALUE_TYPE dconstroot;
7862
7863 real_arithmetic (&dconstroot, MULT_EXPR,
7864 dconst_third_ptr (), dconst_third_ptr ());
7865 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7866 tree_root = build_real (type, dconstroot);
7867 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7868 }
7869 }
7870 }
7871
7872 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7873 if (fcode == BUILT_IN_POW
7874 || fcode == BUILT_IN_POWF
7875 || fcode == BUILT_IN_POWL)
7876 {
7877 tree arg00 = CALL_EXPR_ARG (arg, 0);
7878 tree arg01 = CALL_EXPR_ARG (arg, 1);
7879 if (tree_expr_nonnegative_p (arg00))
7880 {
7881 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7882 const REAL_VALUE_TYPE dconstroot
7883 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7884 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7885 build_real (type, dconstroot));
7886 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7887 }
7888 }
7889 }
7890 return NULL_TREE;
7891 }
7892
7893 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7894 TYPE is the type of the return value. Return NULL_TREE if no
7895 simplification can be made. */
7896
7897 static tree
7898 fold_builtin_cos (location_t loc,
7899 tree arg, tree type, tree fndecl)
7900 {
7901 tree res, narg;
7902
7903 if (!validate_arg (arg, REAL_TYPE))
7904 return NULL_TREE;
7905
7906 /* Calculate the result when the argument is a constant. */
7907 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7908 return res;
7909
7910 /* Optimize cos(-x) into cos (x). */
7911 if ((narg = fold_strip_sign_ops (arg)))
7912 return build_call_expr_loc (loc, fndecl, 1, narg);
7913
7914 return NULL_TREE;
7915 }
7916
7917 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7918 Return NULL_TREE if no simplification can be made. */
7919
7920 static tree
7921 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7922 {
7923 if (validate_arg (arg, REAL_TYPE))
7924 {
7925 tree res, narg;
7926
7927 /* Calculate the result when the argument is a constant. */
7928 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7929 return res;
7930
7931 /* Optimize cosh(-x) into cosh (x). */
7932 if ((narg = fold_strip_sign_ops (arg)))
7933 return build_call_expr_loc (loc, fndecl, 1, narg);
7934 }
7935
7936 return NULL_TREE;
7937 }
7938
7939 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7940 argument ARG. TYPE is the type of the return value. Return
7941 NULL_TREE if no simplification can be made. */
7942
7943 static tree
7944 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7945 bool hyper)
7946 {
7947 if (validate_arg (arg, COMPLEX_TYPE)
7948 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7949 {
7950 tree tmp;
7951
7952 /* Calculate the result when the argument is a constant. */
7953 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7954 return tmp;
7955
7956 /* Optimize fn(-x) into fn(x). */
7957 if ((tmp = fold_strip_sign_ops (arg)))
7958 return build_call_expr_loc (loc, fndecl, 1, tmp);
7959 }
7960
7961 return NULL_TREE;
7962 }
7963
7964 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7965 Return NULL_TREE if no simplification can be made. */
7966
7967 static tree
7968 fold_builtin_tan (tree arg, tree type)
7969 {
7970 enum built_in_function fcode;
7971 tree res;
7972
7973 if (!validate_arg (arg, REAL_TYPE))
7974 return NULL_TREE;
7975
7976 /* Calculate the result when the argument is a constant. */
7977 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7978 return res;
7979
7980 /* Optimize tan(atan(x)) = x. */
7981 fcode = builtin_mathfn_code (arg);
7982 if (flag_unsafe_math_optimizations
7983 && (fcode == BUILT_IN_ATAN
7984 || fcode == BUILT_IN_ATANF
7985 || fcode == BUILT_IN_ATANL))
7986 return CALL_EXPR_ARG (arg, 0);
7987
7988 return NULL_TREE;
7989 }
7990
7991 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7992 NULL_TREE if no simplification can be made. */
7993
7994 static tree
7995 fold_builtin_sincos (location_t loc,
7996 tree arg0, tree arg1, tree arg2)
7997 {
7998 tree type;
7999 tree res, fn, call;
8000
8001 if (!validate_arg (arg0, REAL_TYPE)
8002 || !validate_arg (arg1, POINTER_TYPE)
8003 || !validate_arg (arg2, POINTER_TYPE))
8004 return NULL_TREE;
8005
8006 type = TREE_TYPE (arg0);
8007
8008 /* Calculate the result when the argument is a constant. */
8009 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8010 return res;
8011
8012 /* Canonicalize sincos to cexpi. */
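  /* Conceptually (a sketch; the actual types follow ARG0):
       sincos (x, sinp, cosp);
     becomes
       __complex__ double t = cexpi (x);
       *sinp = __imag__ t;
       *cosp = __real__ t;  */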
8013 if (!targetm.libc_has_function (function_c99_math_complex))
8014 return NULL_TREE;
8015 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8016 if (!fn)
8017 return NULL_TREE;
8018
8019 call = build_call_expr_loc (loc, fn, 1, arg0);
8020 call = builtin_save_expr (call);
8021
8022 return build2 (COMPOUND_EXPR, void_type_node,
8023 build2 (MODIFY_EXPR, void_type_node,
8024 build_fold_indirect_ref_loc (loc, arg1),
8025 build1 (IMAGPART_EXPR, type, call)),
8026 build2 (MODIFY_EXPR, void_type_node,
8027 build_fold_indirect_ref_loc (loc, arg2),
8028 build1 (REALPART_EXPR, type, call)));
8029 }
8030
8031 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8032 NULL_TREE if no simplification can be made. */
8033
8034 static tree
8035 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8036 {
8037 tree rtype;
8038 tree realp, imagp, ifn;
8039 tree res;
8040
8041 if (!validate_arg (arg0, COMPLEX_TYPE)
8042 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8043 return NULL_TREE;
8044
8045 /* Calculate the result when the argument is a constant. */
8046 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8047 return res;
8048
8049 rtype = TREE_TYPE (TREE_TYPE (arg0));
8050
8051 /* If we can figure out the real part of arg0 and it is constant zero,
8052 fold cexp to cexpi. */
8053 if (!targetm.libc_has_function (function_c99_math_complex))
8054 return NULL_TREE;
8055 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8056 if (!ifn)
8057 return NULL_TREE;
8058
8059 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8060 && real_zerop (realp))
8061 {
8062 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8063 return build_call_expr_loc (loc, ifn, 1, narg);
8064 }
8065
8066 /* If we can easily decompose the real and imaginary parts, split
8067 cexp (r + i*I) into exp (r) * cexpi (i). */
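  /* A sketch of the resulting tree:
       t = cexpi (i); e = exp (r);
       COMPLEX_EXPR <e * __real__ t, e * __imag__ t>  */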
8068 if (flag_unsafe_math_optimizations
8069 && realp)
8070 {
8071 tree rfn, rcall, icall;
8072
8073 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8074 if (!rfn)
8075 return NULL_TREE;
8076
8077 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8078 if (!imagp)
8079 return NULL_TREE;
8080
8081 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8082 icall = builtin_save_expr (icall);
8083 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8084 rcall = builtin_save_expr (rcall);
8085 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8086 fold_build2_loc (loc, MULT_EXPR, rtype,
8087 rcall,
8088 fold_build1_loc (loc, REALPART_EXPR,
8089 rtype, icall)),
8090 fold_build2_loc (loc, MULT_EXPR, rtype,
8091 rcall,
8092 fold_build1_loc (loc, IMAGPART_EXPR,
8093 rtype, icall)));
8094 }
8095
8096 return NULL_TREE;
8097 }
8098
8099 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8100 Return NULL_TREE if no simplification can be made. */
8101
8102 static tree
8103 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8104 {
8105 if (!validate_arg (arg, REAL_TYPE))
8106 return NULL_TREE;
8107
8108 /* Optimize trunc of constant value. */
8109 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8110 {
8111 REAL_VALUE_TYPE r, x;
8112 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8113
8114 x = TREE_REAL_CST (arg);
8115 real_trunc (&r, TYPE_MODE (type), &x);
8116 return build_real (type, r);
8117 }
8118
8119 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8120 }
8121
8122 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8123 Return NULL_TREE if no simplification can be made. */
8124
8125 static tree
8126 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8127 {
8128 if (!validate_arg (arg, REAL_TYPE))
8129 return NULL_TREE;
8130
8131 /* Optimize floor of constant value. */
8132 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8133 {
8134 REAL_VALUE_TYPE x;
8135
8136 x = TREE_REAL_CST (arg);
8137 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8138 {
8139 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8140 REAL_VALUE_TYPE r;
8141
8142 real_floor (&r, TYPE_MODE (type), &x);
8143 return build_real (type, r);
8144 }
8145 }
8146
8147 /* Fold floor (x) where x is nonnegative to trunc (x). */
8148 if (tree_expr_nonnegative_p (arg))
8149 {
8150 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8151 if (truncfn)
8152 return build_call_expr_loc (loc, truncfn, 1, arg);
8153 }
8154
8155 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8156 }
8157
8158 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8159 Return NULL_TREE if no simplification can be made. */
8160
8161 static tree
8162 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8163 {
8164 if (!validate_arg (arg, REAL_TYPE))
8165 return NULL_TREE;
8166
8167 /* Optimize ceil of constant value. */
8168 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8169 {
8170 REAL_VALUE_TYPE x;
8171
8172 x = TREE_REAL_CST (arg);
8173 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8174 {
8175 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8176 REAL_VALUE_TYPE r;
8177
8178 real_ceil (&r, TYPE_MODE (type), &x);
8179 return build_real (type, r);
8180 }
8181 }
8182
8183 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8184 }
8185
8186 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8187 Return NULL_TREE if no simplification can be made. */
8188
8189 static tree
8190 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8191 {
8192 if (!validate_arg (arg, REAL_TYPE))
8193 return NULL_TREE;
8194
8195 /* Optimize round of constant value. */
8196 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8197 {
8198 REAL_VALUE_TYPE x;
8199
8200 x = TREE_REAL_CST (arg);
8201 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8202 {
8203 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8204 REAL_VALUE_TYPE r;
8205
8206 real_round (&r, TYPE_MODE (type), &x);
8207 return build_real (type, r);
8208 }
8209 }
8210
8211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8212 }
8213
8214 /* Fold function call to builtin lround, lroundf or lroundl (or the
8215 corresponding long long versions) and other rounding functions. ARG
8216 is the argument to the call. Return NULL_TREE if no simplification
8217 can be made. */
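/* For example, lround (2.5) folds here to the integer constant 3,
   since real_round rounds halfway cases away from zero.  */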
8218
8219 static tree
8220 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8221 {
8222 if (!validate_arg (arg, REAL_TYPE))
8223 return NULL_TREE;
8224
8225 /* Optimize lround of constant value. */
8226 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8227 {
8228 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8229
8230 if (real_isfinite (&x))
8231 {
8232 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8233 tree ftype = TREE_TYPE (arg);
8234 REAL_VALUE_TYPE r;
8235 bool fail = false;
8236
8237 switch (DECL_FUNCTION_CODE (fndecl))
8238 {
8239 CASE_FLT_FN (BUILT_IN_IFLOOR):
8240 CASE_FLT_FN (BUILT_IN_LFLOOR):
8241 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8242 real_floor (&r, TYPE_MODE (ftype), &x);
8243 break;
8244
8245 CASE_FLT_FN (BUILT_IN_ICEIL):
8246 CASE_FLT_FN (BUILT_IN_LCEIL):
8247 CASE_FLT_FN (BUILT_IN_LLCEIL):
8248 real_ceil (&r, TYPE_MODE (ftype), &x);
8249 break;
8250
8251 CASE_FLT_FN (BUILT_IN_IROUND):
8252 CASE_FLT_FN (BUILT_IN_LROUND):
8253 CASE_FLT_FN (BUILT_IN_LLROUND):
8254 real_round (&r, TYPE_MODE (ftype), &x);
8255 break;
8256
8257 default:
8258 gcc_unreachable ();
8259 }
8260
8261 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8262 if (!fail)
8263 return wide_int_to_tree (itype, val);
8264 }
8265 }
8266
8267 switch (DECL_FUNCTION_CODE (fndecl))
8268 {
8269 CASE_FLT_FN (BUILT_IN_LFLOOR):
8270 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8271 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8272 if (tree_expr_nonnegative_p (arg))
8273 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8274 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8275 break;
8276 default:;
8277 }
8278
8279 return fold_fixed_mathfn (loc, fndecl, arg);
8280 }
8281
8282 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8283 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8284 the argument to the call. Return NULL_TREE if no simplification can
8285 be made. */
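/* For instance, __builtin_popcount (0xf0) folds to 4, and
   __builtin_ffs (8) folds to 4 (ffs numbers bits from one).  */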
8286
8287 static tree
8288 fold_builtin_bitop (tree fndecl, tree arg)
8289 {
8290 if (!validate_arg (arg, INTEGER_TYPE))
8291 return NULL_TREE;
8292
8293 /* Optimize for constant argument. */
8294 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8295 {
8296 tree type = TREE_TYPE (arg);
8297 int result;
8298
8299 switch (DECL_FUNCTION_CODE (fndecl))
8300 {
8301 CASE_INT_FN (BUILT_IN_FFS):
8302 result = wi::ffs (arg);
8303 break;
8304
8305 CASE_INT_FN (BUILT_IN_CLZ):
8306 if (wi::ne_p (arg, 0))
8307 result = wi::clz (arg);
8308 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8309 result = TYPE_PRECISION (type);
8310 break;
8311
8312 CASE_INT_FN (BUILT_IN_CTZ):
8313 if (wi::ne_p (arg, 0))
8314 result = wi::ctz (arg);
8315 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8316 result = TYPE_PRECISION (type);
8317 break;
8318
8319 CASE_INT_FN (BUILT_IN_CLRSB):
8320 result = wi::clrsb (arg);
8321 break;
8322
8323 CASE_INT_FN (BUILT_IN_POPCOUNT):
8324 result = wi::popcount (arg);
8325 break;
8326
8327 CASE_INT_FN (BUILT_IN_PARITY):
8328 result = wi::parity (arg);
8329 break;
8330
8331 default:
8332 gcc_unreachable ();
8333 }
8334
8335 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8336 }
8337
8338 return NULL_TREE;
8339 }
8340
8341 /* Fold function call to builtin_bswap and the short, long and long long
8342 variants. Return NULL_TREE if no simplification can be made. */
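/* For instance, __builtin_bswap32 (0x12345678) folds to 0x78563412.  */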
8343 static tree
8344 fold_builtin_bswap (tree fndecl, tree arg)
8345 {
8346 if (! validate_arg (arg, INTEGER_TYPE))
8347 return NULL_TREE;
8348
8349 /* Optimize for a constant argument. */
8350 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8351 {
8352 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8353
8354 switch (DECL_FUNCTION_CODE (fndecl))
8355 {
8356 case BUILT_IN_BSWAP16:
8357 case BUILT_IN_BSWAP32:
8358 case BUILT_IN_BSWAP64:
8359 {
8360 signop sgn = TYPE_SIGN (type);
8361 tree result =
8362 wide_int_to_tree (type,
8363 wide_int::from (arg, TYPE_PRECISION (type),
8364 sgn).bswap ());
8365 return result;
8366 }
8367 default:
8368 gcc_unreachable ();
8369 }
8370 }
8371
8372 return NULL_TREE;
8373 }
8374
8375 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8376 NULL_TREE if no simplification can be made. */
8377
8378 static tree
8379 fold_builtin_hypot (location_t loc, tree fndecl,
8380 tree arg0, tree arg1, tree type)
8381 {
8382 tree res, narg0, narg1;
8383
8384 if (!validate_arg (arg0, REAL_TYPE)
8385 || !validate_arg (arg1, REAL_TYPE))
8386 return NULL_TREE;
8387
8388 /* Calculate the result when the argument is a constant. */
8389 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8390 return res;
8391
8392 /* If either argument to hypot has a negate or abs, strip that off.
8393 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8394 narg0 = fold_strip_sign_ops (arg0);
8395 narg1 = fold_strip_sign_ops (arg1);
8396 if (narg0 || narg1)
8397 {
8398 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8399 narg1 ? narg1 : arg1);
8400 }
8401
8402 /* If either argument is zero, hypot is fabs of the other. */
8403 if (real_zerop (arg0))
8404 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8405 else if (real_zerop (arg1))
8406 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8407
8408 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8409 if (flag_unsafe_math_optimizations
8410 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8411 {
8412 const REAL_VALUE_TYPE sqrt2_trunc
8413 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8414 return fold_build2_loc (loc, MULT_EXPR, type,
8415 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8416 build_real (type, sqrt2_trunc));
8417 }
8418
8419 return NULL_TREE;
8420 }
8421
8422
8423 /* Fold a builtin function call to pow, powf, or powl. Return
8424 NULL_TREE if no simplification can be made. */
8425 static tree
8426 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8427 {
8428 tree res;
8429
8430 if (!validate_arg (arg0, REAL_TYPE)
8431 || !validate_arg (arg1, REAL_TYPE))
8432 return NULL_TREE;
8433
8434 /* Calculate the result when the argument is a constant. */
8435 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8436 return res;
8437
8438 /* Optimize pow(1.0,y) = 1.0. */
8439 if (real_onep (arg0))
8440 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8441
8442 if (TREE_CODE (arg1) == REAL_CST
8443 && !TREE_OVERFLOW (arg1))
8444 {
8445 REAL_VALUE_TYPE cint;
8446 REAL_VALUE_TYPE c;
8447 HOST_WIDE_INT n;
8448
8449 c = TREE_REAL_CST (arg1);
8450
8451 /* Optimize pow(x,0.0) = 1.0. */
8452 if (REAL_VALUES_EQUAL (c, dconst0))
8453 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8454 arg0);
8455
8456 /* Optimize pow(x,1.0) = x. */
8457 if (REAL_VALUES_EQUAL (c, dconst1))
8458 return arg0;
8459
8460 /* Optimize pow(x,-1.0) = 1.0/x. */
8461 if (REAL_VALUES_EQUAL (c, dconstm1))
8462 return fold_build2_loc (loc, RDIV_EXPR, type,
8463 build_real (type, dconst1), arg0);
8464
8465 /* Optimize pow(x,0.5) = sqrt(x). */
8466 if (flag_unsafe_math_optimizations
8467 && REAL_VALUES_EQUAL (c, dconsthalf))
8468 {
8469 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8470
8471 if (sqrtfn != NULL_TREE)
8472 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8473 }
8474
8475 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8476 if (flag_unsafe_math_optimizations)
8477 {
8478 const REAL_VALUE_TYPE dconstroot
8479 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8480
8481 if (REAL_VALUES_EQUAL (c, dconstroot))
8482 {
8483 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8484 if (cbrtfn != NULL_TREE)
8485 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8486 }
8487 }
8488
8489 /* Check for an integer exponent. */
8490 n = real_to_integer (&c);
8491 real_from_integer (&cint, VOIDmode, n, SIGNED);
8492 if (real_identical (&c, &cint))
8493 {
8494 /* Attempt to evaluate pow at compile-time, unless this should
8495 raise an exception. */
8496 if (TREE_CODE (arg0) == REAL_CST
8497 && !TREE_OVERFLOW (arg0)
8498 && (n > 0
8499 || (!flag_trapping_math && !flag_errno_math)
8500 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8501 {
8502 REAL_VALUE_TYPE x;
8503 bool inexact;
8504
8505 x = TREE_REAL_CST (arg0);
8506 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8507 if (flag_unsafe_math_optimizations || !inexact)
8508 return build_real (type, x);
8509 }
8510
8511 /* Strip sign ops from even integer powers. */
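	  /* e.g. pow (-x, 2.0) -> pow (x, 2.0); an even exponent makes
	     the sign of the base irrelevant.  */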
8512 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8513 {
8514 tree narg0 = fold_strip_sign_ops (arg0);
8515 if (narg0)
8516 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8517 }
8518 }
8519 }
8520
8521 if (flag_unsafe_math_optimizations)
8522 {
8523 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8524
8525 /* Optimize pow(expN(x),y) = expN(x*y). */
8526 if (BUILTIN_EXPONENT_P (fcode))
8527 {
8528 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8529 tree arg = CALL_EXPR_ARG (arg0, 0);
8530 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8531 return build_call_expr_loc (loc, expfn, 1, arg);
8532 }
8533
8534 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8535 if (BUILTIN_SQRT_P (fcode))
8536 {
8537 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8538 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8539 build_real (type, dconsthalf));
8540 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8541 }
8542
8543 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8544 if (BUILTIN_CBRT_P (fcode))
8545 {
8546 tree arg = CALL_EXPR_ARG (arg0, 0);
8547 if (tree_expr_nonnegative_p (arg))
8548 {
8549 const REAL_VALUE_TYPE dconstroot
8550 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8551 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8552 build_real (type, dconstroot));
8553 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8554 }
8555 }
8556
8557 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8558 if (fcode == BUILT_IN_POW
8559 || fcode == BUILT_IN_POWF
8560 || fcode == BUILT_IN_POWL)
8561 {
8562 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8563 if (tree_expr_nonnegative_p (arg00))
8564 {
8565 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8566 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8567 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8568 }
8569 }
8570 }
8571
8572 return NULL_TREE;
8573 }
8574
8575 /* Fold a builtin function call to powi, powif, or powil with arguments
8576 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8577 static tree
8578 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8579 tree arg0, tree arg1, tree type)
8580 {
8581 if (!validate_arg (arg0, REAL_TYPE)
8582 || !validate_arg (arg1, INTEGER_TYPE))
8583 return NULL_TREE;
8584
8585 /* Optimize powi(1.0,y) = 1.0. */
8586 if (real_onep (arg0))
8587 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8588
8589 if (tree_fits_shwi_p (arg1))
8590 {
8591 HOST_WIDE_INT c = tree_to_shwi (arg1);
8592
8593 /* Evaluate powi at compile-time. */
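      /* e.g. __builtin_powi (2.0, 10) folds to the constant 1024.0.  */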
8594 if (TREE_CODE (arg0) == REAL_CST
8595 && !TREE_OVERFLOW (arg0))
8596 {
8597 REAL_VALUE_TYPE x;
8598 x = TREE_REAL_CST (arg0);
8599 real_powi (&x, TYPE_MODE (type), &x, c);
8600 return build_real (type, x);
8601 }
8602
8603 /* Optimize powi(x,0) = 1.0. */
8604 if (c == 0)
8605 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8606 arg0);
8607
8608 /* Optimize powi(x,1) = x. */
8609 if (c == 1)
8610 return arg0;
8611
8612 /* Optimize powi(x,-1) = 1.0/x. */
8613 if (c == -1)
8614 return fold_build2_loc (loc, RDIV_EXPR, type,
8615 build_real (type, dconst1), arg0);
8616 }
8617
8618 return NULL_TREE;
8619 }
8620
8621 /* A subroutine of fold_builtin to fold the various exponent
8622 functions. Return NULL_TREE if no simplification can be made.
8623 FUNC is the corresponding MPFR exponent function. */
8624
8625 static tree
8626 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8627 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8628 {
8629 if (validate_arg (arg, REAL_TYPE))
8630 {
8631 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8632 tree res;
8633
8634 /* Calculate the result when the argument is a constant. */
8635 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8636 return res;
8637
8638 /* Optimize expN(logN(x)) = x. */
8639 if (flag_unsafe_math_optimizations)
8640 {
8641 const enum built_in_function fcode = builtin_mathfn_code (arg);
8642
8643 if ((func == mpfr_exp
8644 && (fcode == BUILT_IN_LOG
8645 || fcode == BUILT_IN_LOGF
8646 || fcode == BUILT_IN_LOGL))
8647 || (func == mpfr_exp2
8648 && (fcode == BUILT_IN_LOG2
8649 || fcode == BUILT_IN_LOG2F
8650 || fcode == BUILT_IN_LOG2L))
8651 || (func == mpfr_exp10
8652 && (fcode == BUILT_IN_LOG10
8653 || fcode == BUILT_IN_LOG10F
8654 || fcode == BUILT_IN_LOG10L)))
8655 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8656 }
8657 }
8658
8659 return NULL_TREE;
8660 }
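
/* For example, exp (log (x)) folds to x under
   -funsafe-math-optimizations, and likewise exp2 (log2 (x)) and
   exp10 (log10 (x)); constant arguments are instead evaluated through
   the matching MPFR function by do_mpfr_arg1.  */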
8661
8662 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8663 Return NULL_TREE if no simplification can be made. */
8664
8665 static tree
8666 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8667 {
8668 tree fn, len, lenp1, call, type;
8669
8670 if (!validate_arg (dest, POINTER_TYPE)
8671 || !validate_arg (src, POINTER_TYPE))
8672 return NULL_TREE;
8673
8674 len = c_strlen (src, 1);
8675 if (!len
8676 || TREE_CODE (len) != INTEGER_CST)
8677 return NULL_TREE;
8678
8679 if (optimize_function_for_size_p (cfun)
8680 /* If length is zero it's small enough. */
8681 && !integer_zerop (len))
8682 return NULL_TREE;
8683
8684 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8685 if (!fn)
8686 return NULL_TREE;
8687
8688 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8689 fold_convert_loc (loc, size_type_node, len),
8690 build_int_cst (size_type_node, 1));
8691 /* We use dest twice in building our expression. Save it from
8692 multiple expansions. */
8693 dest = builtin_save_expr (dest);
8694 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8695
8696 type = TREE_TYPE (TREE_TYPE (fndecl));
8697 dest = fold_build_pointer_plus_loc (loc, dest, len);
8698 dest = fold_convert_loc (loc, type, dest);
8699 dest = omit_one_operand_loc (loc, type, dest, call);
8700 return dest;
8701 }
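
/* Illustrative sketch of the fold above: for a source with known
   constant length, stpcpy (d, "abc") becomes roughly

     (memcpy (d, "abc", 4), (char *) d + 3)

   i.e. a memcpy of strlen + 1 bytes whose value is DEST + LEN.  */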
8702
8703 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8704 arguments to the call, and TYPE is its return type.
8705 Return NULL_TREE if no simplification can be made. */
8706
8707 static tree
8708 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8709 {
8710 if (!validate_arg (arg1, POINTER_TYPE)
8711 || !validate_arg (arg2, INTEGER_TYPE)
8712 || !validate_arg (len, INTEGER_TYPE))
8713 return NULL_TREE;
8714 else
8715 {
8716 const char *p1;
8717
8718 if (TREE_CODE (arg2) != INTEGER_CST
8719 || !tree_fits_uhwi_p (len))
8720 return NULL_TREE;
8721
8722 p1 = c_getstr (arg1);
8723 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8724 {
8725 char c;
8726 const char *r;
8727 tree tem;
8728
8729 if (target_char_cast (arg2, &c))
8730 return NULL_TREE;
8731
8732 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8733
8734 if (r == NULL)
8735 return build_int_cst (TREE_TYPE (arg1), 0);
8736
8737 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8738 return fold_convert_loc (loc, type, tem);
8739 }
8740 return NULL_TREE;
8741 }
8742 }
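
/* For example, with a constant haystack memchr ("hello", 'l', 5)
   folds to "hello" + 2, while a miss such as memchr ("hello", 'z', 5)
   folds to a null pointer of the first argument's type.  */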
8743
8744 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8745 Return NULL_TREE if no simplification can be made. */
8746
8747 static tree
8748 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8749 {
8750 const char *p1, *p2;
8751
8752 if (!validate_arg (arg1, POINTER_TYPE)
8753 || !validate_arg (arg2, POINTER_TYPE)
8754 || !validate_arg (len, INTEGER_TYPE))
8755 return NULL_TREE;
8756
8757 /* If the LEN parameter is zero, return zero. */
8758 if (integer_zerop (len))
8759 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8760 arg1, arg2);
8761
8762 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8763 if (operand_equal_p (arg1, arg2, 0))
8764 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8765
8766 p1 = c_getstr (arg1);
8767 p2 = c_getstr (arg2);
8768
8769 /* If all arguments are constant, and the value of len is not greater
8770 than the lengths of arg1 and arg2, evaluate at compile-time. */
8771 if (tree_fits_uhwi_p (len) && p1 && p2
8772 && compare_tree_int (len, strlen (p1) + 1) <= 0
8773 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8774 {
8775 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8776
8777 if (r > 0)
8778 return integer_one_node;
8779 else if (r < 0)
8780 return integer_minus_one_node;
8781 else
8782 return integer_zero_node;
8783 }
8784
8785 /* If the LEN parameter is one, return an expression corresponding to
8786 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8787 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8788 {
8789 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8790 tree cst_uchar_ptr_node
8791 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8792
8793 tree ind1
8794 = fold_convert_loc (loc, integer_type_node,
8795 build1 (INDIRECT_REF, cst_uchar_node,
8796 fold_convert_loc (loc,
8797 cst_uchar_ptr_node,
8798 arg1)));
8799 tree ind2
8800 = fold_convert_loc (loc, integer_type_node,
8801 build1 (INDIRECT_REF, cst_uchar_node,
8802 fold_convert_loc (loc,
8803 cst_uchar_ptr_node,
8804 arg2)));
8805 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8806 }
8807
8808 return NULL_TREE;
8809 }
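
/* For example, memcmp (a, b, 0) folds to 0 (evaluating a and b for
   side effects), memcmp ("ab", "ac", 2) folds to -1 at compile time,
   and memcmp (a, b, 1) folds to
   *(const unsigned char *) a - *(const unsigned char *) b.  */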
8810
8811 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8812 Return NULL_TREE if no simplification can be made. */
8813
8814 static tree
8815 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8816 {
8817 const char *p1, *p2;
8818
8819 if (!validate_arg (arg1, POINTER_TYPE)
8820 || !validate_arg (arg2, POINTER_TYPE))
8821 return NULL_TREE;
8822
8823 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8824 if (operand_equal_p (arg1, arg2, 0))
8825 return integer_zero_node;
8826
8827 p1 = c_getstr (arg1);
8828 p2 = c_getstr (arg2);
8829
8830 if (p1 && p2)
8831 {
8832 const int i = strcmp (p1, p2);
8833 if (i < 0)
8834 return integer_minus_one_node;
8835 else if (i > 0)
8836 return integer_one_node;
8837 else
8838 return integer_zero_node;
8839 }
8840
8841 /* If the second arg is "", return *(const unsigned char*)arg1. */
8842 if (p2 && *p2 == '\0')
8843 {
8844 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8845 tree cst_uchar_ptr_node
8846 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8847
8848 return fold_convert_loc (loc, integer_type_node,
8849 build1 (INDIRECT_REF, cst_uchar_node,
8850 fold_convert_loc (loc,
8851 cst_uchar_ptr_node,
8852 arg1)));
8853 }
8854
8855 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8856 if (p1 && *p1 == '\0')
8857 {
8858 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8859 tree cst_uchar_ptr_node
8860 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8861
8862 tree temp
8863 = fold_convert_loc (loc, integer_type_node,
8864 build1 (INDIRECT_REF, cst_uchar_node,
8865 fold_convert_loc (loc,
8866 cst_uchar_ptr_node,
8867 arg2)));
8868 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8869 }
8870
8871 return NULL_TREE;
8872 }
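
/* For example, strcmp (s, s) folds to 0, strcmp ("a", "b") folds to
   -1 at compile time (the result is canonicalized to -1/0/1), and
   strcmp (s, "") folds to *(const unsigned char *) s.  */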
8873
8874 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8875 Return NULL_TREE if no simplification can be made. */
8876
8877 static tree
8878 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8879 {
8880 const char *p1, *p2;
8881
8882 if (!validate_arg (arg1, POINTER_TYPE)
8883 || !validate_arg (arg2, POINTER_TYPE)
8884 || !validate_arg (len, INTEGER_TYPE))
8885 return NULL_TREE;
8886
8887 /* If the LEN parameter is zero, return zero. */
8888 if (integer_zerop (len))
8889 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8890 arg1, arg2);
8891
8892 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8893 if (operand_equal_p (arg1, arg2, 0))
8894 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8895
8896 p1 = c_getstr (arg1);
8897 p2 = c_getstr (arg2);
8898
8899 if (tree_fits_uhwi_p (len) && p1 && p2)
8900 {
8901 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8902 if (i > 0)
8903 return integer_one_node;
8904 else if (i < 0)
8905 return integer_minus_one_node;
8906 else
8907 return integer_zero_node;
8908 }
8909
8910 /* If the second arg is "", and the length is greater than zero,
8911 return *(const unsigned char*)arg1. */
8912 if (p2 && *p2 == '\0'
8913 && TREE_CODE (len) == INTEGER_CST
8914 && tree_int_cst_sgn (len) == 1)
8915 {
8916 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8917 tree cst_uchar_ptr_node
8918 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8919
8920 return fold_convert_loc (loc, integer_type_node,
8921 build1 (INDIRECT_REF, cst_uchar_node,
8922 fold_convert_loc (loc,
8923 cst_uchar_ptr_node,
8924 arg1)));
8925 }
8926
8927 /* If the first arg is "", and the length is greater than zero,
8928 return -*(const unsigned char*)arg2. */
8929 if (p1 && *p1 == '\0'
8930 && TREE_CODE (len) == INTEGER_CST
8931 && tree_int_cst_sgn (len) == 1)
8932 {
8933 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8934 tree cst_uchar_ptr_node
8935 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8936
8937 tree temp = fold_convert_loc (loc, integer_type_node,
8938 build1 (INDIRECT_REF, cst_uchar_node,
8939 fold_convert_loc (loc,
8940 cst_uchar_ptr_node,
8941 arg2)));
8942 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8943 }
8944
8945 /* If the LEN parameter is one, return an expression corresponding to
8946 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8947 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8948 {
8949 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8950 tree cst_uchar_ptr_node
8951 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8952
8953 tree ind1 = fold_convert_loc (loc, integer_type_node,
8954 build1 (INDIRECT_REF, cst_uchar_node,
8955 fold_convert_loc (loc,
8956 cst_uchar_ptr_node,
8957 arg1)));
8958 tree ind2 = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8961 cst_uchar_ptr_node,
8962 arg2)));
8963 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8964 }
8965
8966 return NULL_TREE;
8967 }
8968
8969 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8970 ARG. Return NULL_TREE if no simplification can be made. */
8971
8972 static tree
8973 fold_builtin_signbit (location_t loc, tree arg, tree type)
8974 {
8975 if (!validate_arg (arg, REAL_TYPE))
8976 return NULL_TREE;
8977
8978 /* If ARG is a compile-time constant, determine the result. */
8979 if (TREE_CODE (arg) == REAL_CST
8980 && !TREE_OVERFLOW (arg))
8981 {
8982 REAL_VALUE_TYPE c;
8983
8984 c = TREE_REAL_CST (arg);
8985 return (REAL_VALUE_NEGATIVE (c)
8986 ? build_one_cst (type)
8987 : build_zero_cst (type));
8988 }
8989
8990 /* If ARG is non-negative, the result is always zero. */
8991 if (tree_expr_nonnegative_p (arg))
8992 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8993
8994 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8995 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8996 return fold_convert (type,
8997 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8998 build_real (TREE_TYPE (arg), dconst0)));
8999
9000 return NULL_TREE;
9001 }
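
/* For example, signbit (-3.0) folds to 1 and signbit (2.5) to 0 at
   compile time; for a format without signed zeros the call instead
   becomes the expression x < 0.0.  */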
9002
9003 /* Fold function call to builtin copysign, copysignf or copysignl with
9004 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9005 be made. */
9006
9007 static tree
9008 fold_builtin_copysign (location_t loc, tree fndecl,
9009 tree arg1, tree arg2, tree type)
9010 {
9011 tree tem;
9012
9013 if (!validate_arg (arg1, REAL_TYPE)
9014 || !validate_arg (arg2, REAL_TYPE))
9015 return NULL_TREE;
9016
9017 /* copysign(X,X) is X. */
9018 if (operand_equal_p (arg1, arg2, 0))
9019 return fold_convert_loc (loc, type, arg1);
9020
9021 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9022 if (TREE_CODE (arg1) == REAL_CST
9023 && TREE_CODE (arg2) == REAL_CST
9024 && !TREE_OVERFLOW (arg1)
9025 && !TREE_OVERFLOW (arg2))
9026 {
9027 REAL_VALUE_TYPE c1, c2;
9028
9029 c1 = TREE_REAL_CST (arg1);
9030 c2 = TREE_REAL_CST (arg2);
9031 /* c1.sign := c2.sign. */
9032 real_copysign (&c1, &c2);
9033 return build_real (type, c1);
9034 }
9035
9036 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9037 Remember to evaluate Y for side-effects. */
9038 if (tree_expr_nonnegative_p (arg2))
9039 return omit_one_operand_loc (loc, type,
9040 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9041 arg2);
9042
9043 /* Strip sign changing operations for the first argument. */
9044 tem = fold_strip_sign_ops (arg1);
9045 if (tem)
9046 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9047
9048 return NULL_TREE;
9049 }
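
/* For example, copysign (3.0, -0.5) folds to -3.0 at compile time,
   and copysign (x, 2.0) folds to fabs (x) since the sign source is
   known to be non-negative.  */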
9050
9051 /* Fold a call to builtin isascii with argument ARG. */
9052
9053 static tree
9054 fold_builtin_isascii (location_t loc, tree arg)
9055 {
9056 if (!validate_arg (arg, INTEGER_TYPE))
9057 return NULL_TREE;
9058 else
9059 {
9060 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9061 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9062 build_int_cst (integer_type_node,
9063 ~ (unsigned HOST_WIDE_INT) 0x7f));
9064 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9065 arg, integer_zero_node);
9066 }
9067 }
9068
9069 /* Fold a call to builtin toascii with argument ARG. */
9070
9071 static tree
9072 fold_builtin_toascii (location_t loc, tree arg)
9073 {
9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
9076
9077 /* Transform toascii(c) -> (c & 0x7f). */
9078 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9079 build_int_cst (integer_type_node, 0x7f));
9080 }
9081
9082 /* Fold a call to builtin isdigit with argument ARG. */
9083
9084 static tree
9085 fold_builtin_isdigit (location_t loc, tree arg)
9086 {
9087 if (!validate_arg (arg, INTEGER_TYPE))
9088 return NULL_TREE;
9089 else
9090 {
9091 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9092 /* According to the C standard, isdigit is unaffected by locale.
9093 However, it definitely is affected by the target character set. */
9094 unsigned HOST_WIDE_INT target_digit0
9095 = lang_hooks.to_target_charset ('0');
9096
9097 if (target_digit0 == 0)
9098 return NULL_TREE;
9099
9100 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9101 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9102 build_int_cst (unsigned_type_node, target_digit0));
9103 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9104 build_int_cst (unsigned_type_node, 9));
9105 }
9106 }
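
/* On an ASCII target, for example, isdigit (c) becomes
   (unsigned) c - 48 <= 9, a single unsigned comparison that covers
   both bounds of the digit range.  */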
9107
9108 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9109
9110 static tree
9111 fold_builtin_fabs (location_t loc, tree arg, tree type)
9112 {
9113 if (!validate_arg (arg, REAL_TYPE))
9114 return NULL_TREE;
9115
9116 arg = fold_convert_loc (loc, type, arg);
9117 if (TREE_CODE (arg) == REAL_CST)
9118 return fold_abs_const (arg, type);
9119 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9120 }
9121
9122 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9123
9124 static tree
9125 fold_builtin_abs (location_t loc, tree arg, tree type)
9126 {
9127 if (!validate_arg (arg, INTEGER_TYPE))
9128 return NULL_TREE;
9129
9130 arg = fold_convert_loc (loc, type, arg);
9131 if (TREE_CODE (arg) == INTEGER_CST)
9132 return fold_abs_const (arg, type);
9133 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9134 }
9135
9136 /* Fold an fma operation with arguments ARG[012]. */
9137
9138 tree
9139 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9140 tree type, tree arg0, tree arg1, tree arg2)
9141 {
9142 if (TREE_CODE (arg0) == REAL_CST
9143 && TREE_CODE (arg1) == REAL_CST
9144 && TREE_CODE (arg2) == REAL_CST)
9145 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9146
9147 return NULL_TREE;
9148 }
9149
9150 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9151
9152 static tree
9153 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9154 {
9155 if (validate_arg (arg0, REAL_TYPE)
9156 && validate_arg (arg1, REAL_TYPE)
9157 && validate_arg (arg2, REAL_TYPE))
9158 {
9159 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9160 if (tem)
9161 return tem;
9162
9163 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9164 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9165 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9166 }
9167 return NULL_TREE;
9168 }
9169
9170 /* Fold a call to builtin fmin or fmax. */
9171
9172 static tree
9173 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9174 tree type, bool max)
9175 {
9176 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9177 {
9178 /* Calculate the result when the argument is a constant. */
9179 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9180
9181 if (res)
9182 return res;
9183
9184 /* If either argument is NaN, return the other one. Avoid the
9185 transformation if we get (and honor) a signalling NaN. Using
9186 omit_one_operand() ensures we create a non-lvalue. */
9187 if (TREE_CODE (arg0) == REAL_CST
9188 && real_isnan (&TREE_REAL_CST (arg0))
9189 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9190 || ! TREE_REAL_CST (arg0).signalling))
9191 return omit_one_operand_loc (loc, type, arg1, arg0);
9192 if (TREE_CODE (arg1) == REAL_CST
9193 && real_isnan (&TREE_REAL_CST (arg1))
9194 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9195 || ! TREE_REAL_CST (arg1).signalling))
9196 return omit_one_operand_loc (loc, type, arg0, arg1);
9197
9198 /* Transform fmin/fmax(x,x) -> x. */
9199 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9200 return omit_one_operand_loc (loc, type, arg0, arg1);
9201
9202 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9203 functions to return the numeric arg if the other one is NaN.
9204 These tree codes don't honor that, so only transform if
9205 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9206 handled, so we don't have to worry about it either. */
9207 if (flag_finite_math_only)
9208 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9209 fold_convert_loc (loc, type, arg0),
9210 fold_convert_loc (loc, type, arg1));
9211 }
9212 return NULL_TREE;
9213 }
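
/* For example, fmax (x, __builtin_nan ("")) folds to x (a quiet NaN,
   so the transformation is safe), fmin (x, x) folds to x, and with
   -ffinite-math-only fmax (x, y) becomes a bare MAX_EXPR.  */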
9214
9215 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9216
9217 static tree
9218 fold_builtin_carg (location_t loc, tree arg, tree type)
9219 {
9220 if (validate_arg (arg, COMPLEX_TYPE)
9221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9222 {
9223 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9224
9225 if (atan2_fn)
9226 {
9227 tree new_arg = builtin_save_expr (arg);
9228 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9229 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9230 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9231 }
9232 }
9233
9234 return NULL_TREE;
9235 }
9236
9237 /* Fold a call to builtin logb/ilogb. */
9238
9239 static tree
9240 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9241 {
9242 if (! validate_arg (arg, REAL_TYPE))
9243 return NULL_TREE;
9244
9245 STRIP_NOPS (arg);
9246
9247 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9248 {
9249 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9250
9251 switch (value->cl)
9252 {
9253 case rvc_nan:
9254 case rvc_inf:
9255 /* If arg is Inf or NaN and we're logb, return it. */
9256 if (TREE_CODE (rettype) == REAL_TYPE)
9257 {
9258 /* For logb(-Inf) we have to return +Inf. */
9259 if (real_isinf (value) && real_isneg (value))
9260 {
9261 REAL_VALUE_TYPE tem;
9262 real_inf (&tem);
9263 return build_real (rettype, tem);
9264 }
9265 return fold_convert_loc (loc, rettype, arg);
9266 }
9267 /* Fall through... */
9268 case rvc_zero:
9269 /* Zero may set errno and/or raise an exception for logb; also,
9270 for ilogb we don't know FP_ILOGB0. */
9271 return NULL_TREE;
9272 case rvc_normal:
9273 /* For normal numbers, proceed iff radix == 2. In GCC,
9274 normalized significands are in the range [0.5, 1.0). We
9275 want the exponent as if they were [1.0, 2.0) so get the
9276 exponent and subtract 1. */
9277 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9278 return fold_convert_loc (loc, rettype,
9279 build_int_cst (integer_type_node,
9280 REAL_EXP (value)-1));
9281 break;
9282 }
9283 }
9284
9285 return NULL_TREE;
9286 }
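
/* A worked example of the rvc_normal case: 8.0 is represented with
   significand 0.5 and exponent 4, so logb (8.0) folds to
   REAL_EXP - 1 = 3, while logb (-__builtin_inf ()) folds to +Inf.  */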
9287
9288 /* Fold a call to builtin significand, if radix == 2. */
9289
9290 static tree
9291 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9292 {
9293 if (! validate_arg (arg, REAL_TYPE))
9294 return NULL_TREE;
9295
9296 STRIP_NOPS (arg);
9297
9298 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9299 {
9300 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9301
9302 switch (value->cl)
9303 {
9304 case rvc_zero:
9305 case rvc_nan:
9306 case rvc_inf:
9307 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9308 return fold_convert_loc (loc, rettype, arg);
9309 case rvc_normal:
9310 /* For normal numbers, proceed iff radix == 2. */
9311 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9312 {
9313 REAL_VALUE_TYPE result = *value;
9314 /* In GCC, normalized significands are in the range [0.5,
9315 1.0). We want them to be [1.0, 2.0) so set the
9316 exponent to 1. */
9317 SET_REAL_EXP (&result, 1);
9318 return build_real (rettype, result);
9319 }
9320 break;
9321 }
9322 }
9323
9324 return NULL_TREE;
9325 }
9326
9327 /* Fold a call to builtin frexp; we can assume the base is 2. */
9328
9329 static tree
9330 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9331 {
9332 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9333 return NULL_TREE;
9334
9335 STRIP_NOPS (arg0);
9336
9337 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9338 return NULL_TREE;
9339
9340 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9341
9342 /* Proceed if a valid pointer type was passed in. */
9343 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9344 {
9345 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9346 tree frac, exp;
9347
9348 switch (value->cl)
9349 {
9350 case rvc_zero:
9351 /* For +-0, return (*exp = 0, +-0). */
9352 exp = integer_zero_node;
9353 frac = arg0;
9354 break;
9355 case rvc_nan:
9356 case rvc_inf:
9357 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9358 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9359 case rvc_normal:
9360 {
9361 /* Since the frexp function always expects base 2, and in
9362 GCC normalized significands are already in the range
9363 [0.5, 1.0), we have exactly what frexp wants. */
9364 REAL_VALUE_TYPE frac_rvt = *value;
9365 SET_REAL_EXP (&frac_rvt, 0);
9366 frac = build_real (rettype, frac_rvt);
9367 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9368 }
9369 break;
9370 default:
9371 gcc_unreachable ();
9372 }
9373
9374 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9375 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9376 TREE_SIDE_EFFECTS (arg1) = 1;
9377 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9378 }
9379
9380 return NULL_TREE;
9381 }
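
/* A worked example: since 8.0 is represented as 0.5 * 2**4,
   frexp (8.0, &e) folds to the compound expression (e = 4, 0.5).  */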
9382
9383 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9384 then we can assume the base is two. If it's false, then we have to
9385 check the mode of the TYPE parameter in certain cases. */
9386
9387 static tree
9388 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9389 tree type, bool ldexp)
9390 {
9391 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9392 {
9393 STRIP_NOPS (arg0);
9394 STRIP_NOPS (arg1);
9395
9396 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9397 if (real_zerop (arg0) || integer_zerop (arg1)
9398 || (TREE_CODE (arg0) == REAL_CST
9399 && !real_isfinite (&TREE_REAL_CST (arg0))))
9400 return omit_one_operand_loc (loc, type, arg0, arg1);
9401
9402 /* If both arguments are constant, then try to evaluate it. */
9403 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9404 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9405 && tree_fits_shwi_p (arg1))
9406 {
9407 /* Bound the maximum adjustment to twice the range of the
9408 mode's valid exponents. Use abs to ensure the range is
9409 positive as a sanity check. */
9410 const long max_exp_adj = 2 *
9411 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9412 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9413
9414 /* Get the user-requested adjustment. */
9415 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9416
9417 /* The requested adjustment must be inside this range. This
9418 is a preliminary cap to avoid things like overflow; we
9419 may still fail to compute the result for other reasons. */
9420 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9421 {
9422 REAL_VALUE_TYPE initial_result;
9423
9424 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9425
9426 /* Ensure we didn't overflow. */
9427 if (! real_isinf (&initial_result))
9428 {
9429 const REAL_VALUE_TYPE trunc_result
9430 = real_value_truncate (TYPE_MODE (type), initial_result);
9431
9432 /* Only proceed if the target mode can hold the
9433 resulting value. */
9434 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9435 return build_real (type, trunc_result);
9436 }
9437 }
9438 }
9439 }
9440
9441 return NULL_TREE;
9442 }
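
/* For example, ldexp (x, 0) folds to x (evaluating the other operand
   for side effects), and ldexp (1.0, 10) folds to 1024.0, provided the
   requested adjustment stays within the mode's exponent range and the
   result round-trips through the target mode.  */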
9443
9444 /* Fold a call to builtin modf. */
9445
9446 static tree
9447 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9448 {
9449 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9450 return NULL_TREE;
9451
9452 STRIP_NOPS (arg0);
9453
9454 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9455 return NULL_TREE;
9456
9457 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9458
9459 /* Proceed if a valid pointer type was passed in. */
9460 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9461 {
9462 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9463 REAL_VALUE_TYPE trunc, frac;
9464
9465 switch (value->cl)
9466 {
9467 case rvc_nan:
9468 case rvc_zero:
9469 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9470 trunc = frac = *value;
9471 break;
9472 case rvc_inf:
9473 /* For +-Inf, return (*arg1 = arg0, +-0). */
9474 frac = dconst0;
9475 frac.sign = value->sign;
9476 trunc = *value;
9477 break;
9478 case rvc_normal:
9479 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9480 real_trunc (&trunc, VOIDmode, value);
9481 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9482 /* If the original number was negative and already
9483 integral, then the fractional part is -0.0. */
9484 if (value->sign && frac.cl == rvc_zero)
9485 frac.sign = value->sign;
9486 break;
9487 }
9488
9489 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9490 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9491 build_real (rettype, trunc));
9492 TREE_SIDE_EFFECTS (arg1) = 1;
9493 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9494 build_real (rettype, frac));
9495 }
9496
9497 return NULL_TREE;
9498 }
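
/* Worked examples of the cases above: modf (2.5, &ip) folds to
   (ip = 2.0, 0.5), and modf (-2.0, &ip) folds to (ip = -2.0, -0.0),
   since a negative, already-integral argument keeps a signed-zero
   fraction.  */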
9499
9500 /* Given a location LOC, an interclass builtin function decl FNDECL
9501 and its single argument ARG, return a folded expression computing
9502 the same, or NULL_TREE if we either couldn't or didn't want to fold
9503 (the latter happens if there's an RTL instruction available). */
9504
9505 static tree
9506 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9507 {
9508 machine_mode mode;
9509
9510 if (!validate_arg (arg, REAL_TYPE))
9511 return NULL_TREE;
9512
9513 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9514 return NULL_TREE;
9515
9516 mode = TYPE_MODE (TREE_TYPE (arg));
9517
9518 /* If there is no optab, try generic code. */
9519 switch (DECL_FUNCTION_CODE (fndecl))
9520 {
9521 tree result;
9522
9523 CASE_FLT_FN (BUILT_IN_ISINF):
9524 {
9525 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9526 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9527 tree const type = TREE_TYPE (arg);
9528 REAL_VALUE_TYPE r;
9529 char buf[128];
9530
9531 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9532 real_from_string (&r, buf);
9533 result = build_call_expr (isgr_fn, 2,
9534 fold_build1_loc (loc, ABS_EXPR, type, arg),
9535 build_real (type, r));
9536 return result;
9537 }
9538 CASE_FLT_FN (BUILT_IN_FINITE):
9539 case BUILT_IN_ISFINITE:
9540 {
9541 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9542 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9543 tree const type = TREE_TYPE (arg);
9544 REAL_VALUE_TYPE r;
9545 char buf[128];
9546
9547 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9548 real_from_string (&r, buf);
9549 result = build_call_expr (isle_fn, 2,
9550 fold_build1_loc (loc, ABS_EXPR, type, arg),
9551 build_real (type, r));
9552 /*result = fold_build2_loc (loc, UNGT_EXPR,
9553 TREE_TYPE (TREE_TYPE (fndecl)),
9554 fold_build1_loc (loc, ABS_EXPR, type, arg),
9555 build_real (type, r));
9556 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9557 TREE_TYPE (TREE_TYPE (fndecl)),
9558 result);*/
9559 return result;
9560 }
9561 case BUILT_IN_ISNORMAL:
9562 {
9563 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9564 islessequal(fabs(x),DBL_MAX). */
9565 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9566 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9567 tree const type = TREE_TYPE (arg);
9568 REAL_VALUE_TYPE rmax, rmin;
9569 char buf[128];
9570
9571 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9572 real_from_string (&rmax, buf);
9573 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9574 real_from_string (&rmin, buf);
9575 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9576 result = build_call_expr (isle_fn, 2, arg,
9577 build_real (type, rmax));
9578 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9579 build_call_expr (isge_fn, 2, arg,
9580 build_real (type, rmin)));
9581 return result;
9582 }
9583 default:
9584 break;
9585 }
9586
9587 return NULL_TREE;
9588 }
9589
9590 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9591 ARG is the argument for the call. */
9592
9593 static tree
9594 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9595 {
9596 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9597 REAL_VALUE_TYPE r;
9598
9599 if (!validate_arg (arg, REAL_TYPE))
9600 return NULL_TREE;
9601
9602 switch (builtin_index)
9603 {
9604 case BUILT_IN_ISINF:
9605 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9606 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9607
9608 if (TREE_CODE (arg) == REAL_CST)
9609 {
9610 r = TREE_REAL_CST (arg);
9611 if (real_isinf (&r))
9612 return real_compare (GT_EXPR, &r, &dconst0)
9613 ? integer_one_node : integer_minus_one_node;
9614 else
9615 return integer_zero_node;
9616 }
9617
9618 return NULL_TREE;
9619
9620 case BUILT_IN_ISINF_SIGN:
9621 {
9622 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9623 /* In a boolean context, GCC will fold the inner COND_EXPR to
9624 1. So e.g. "if (isinf_sign(x))" would be folded to just
9625 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9626 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9627 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9628 tree tmp = NULL_TREE;
9629
9630 arg = builtin_save_expr (arg);
9631
9632 if (signbit_fn && isinf_fn)
9633 {
9634 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9635 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9636
9637 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9638 signbit_call, integer_zero_node);
9639 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9640 isinf_call, integer_zero_node);
9641
9642 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9643 integer_minus_one_node, integer_one_node);
9644 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9645 isinf_call, tmp,
9646 integer_zero_node);
9647 }
9648
9649 return tmp;
9650 }
9651
9652 case BUILT_IN_ISFINITE:
9653 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9654 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9655 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9656
9657 if (TREE_CODE (arg) == REAL_CST)
9658 {
9659 r = TREE_REAL_CST (arg);
9660 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9661 }
9662
9663 return NULL_TREE;
9664
9665 case BUILT_IN_ISNAN:
9666 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9667 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9668
9669 if (TREE_CODE (arg) == REAL_CST)
9670 {
9671 r = TREE_REAL_CST (arg);
9672 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9673 }
9674
9675 arg = builtin_save_expr (arg);
9676 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9677
9678 default:
9679 gcc_unreachable ();
9680 }
9681 }
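
/* For example, isnan (x) folds to the constant 0 when the operand
   mode cannot hold a NaN, and otherwise (for a non-constant x) to the
   self-comparison x UNORDERED x.  */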
9682
9683 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9684 This builtin will generate code to return the appropriate floating
9685 point classification depending on the value of the floating point
9686 number passed in. The possible return values must be supplied as
9687 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9688 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9689 one floating point argument, which is "type generic". */
9690
9691 static tree
9692 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9693 {
9694 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9695 arg, type, res, tmp;
9696 machine_mode mode;
9697 REAL_VALUE_TYPE r;
9698 char buf[128];
9699
9700 /* Verify the required arguments in the original call. */
9701 if (nargs != 6
9702 || !validate_arg (args[0], INTEGER_TYPE)
9703 || !validate_arg (args[1], INTEGER_TYPE)
9704 || !validate_arg (args[2], INTEGER_TYPE)
9705 || !validate_arg (args[3], INTEGER_TYPE)
9706 || !validate_arg (args[4], INTEGER_TYPE)
9707 || !validate_arg (args[5], REAL_TYPE))
9708 return NULL_TREE;
9709
9710 fp_nan = args[0];
9711 fp_infinite = args[1];
9712 fp_normal = args[2];
9713 fp_subnormal = args[3];
9714 fp_zero = args[4];
9715 arg = args[5];
9716 type = TREE_TYPE (arg);
9717 mode = TYPE_MODE (type);
9718 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9719
9720 /* fpclassify(x) ->
9721 isnan(x) ? FP_NAN :
9722 (fabs(x) == Inf ? FP_INFINITE :
9723 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9724 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9725
9726 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9727 build_real (type, dconst0));
9728 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9729 tmp, fp_zero, fp_subnormal);
9730
9731 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9732 real_from_string (&r, buf);
9733 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9734 arg, build_real (type, r));
9735 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9736
9737 if (HONOR_INFINITIES (mode))
9738 {
9739 real_inf (&r);
9740 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9741 build_real (type, r));
9742 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9743 fp_infinite, res);
9744 }
9745
9746 if (HONOR_NANS (mode))
9747 {
9748 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9749 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9750 }
9751
9752 return res;
9753 }
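
/* As a concrete instance, for IEEE double (emin == -1021) the
   subnormal/normal threshold built above is 0x1p-1022, i.e. DBL_MIN,
   so the call expands to the nested COND_EXPR chain
   isnan -> FP_NAN, == Inf -> FP_INFINITE, >= DBL_MIN -> FP_NORMAL,
   == 0 -> FP_ZERO, else FP_SUBNORMAL, all tested on fabs (x).  */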
9754
9755 /* Fold a call to an unordered comparison function such as
9756 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9757 being called and ARG0 and ARG1 are the arguments for the call.
9758 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9759 the opposite of the desired result. UNORDERED_CODE is used
9760 for modes that can hold NaNs and ORDERED_CODE is used for
9761 the rest. */
9762
9763 static tree
9764 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9765 enum tree_code unordered_code,
9766 enum tree_code ordered_code)
9767 {
9768 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9769 enum tree_code code;
9770 tree type0, type1;
9771 enum tree_code code0, code1;
9772 tree cmp_type = NULL_TREE;
9773
9774 type0 = TREE_TYPE (arg0);
9775 type1 = TREE_TYPE (arg1);
9776
9777 code0 = TREE_CODE (type0);
9778 code1 = TREE_CODE (type1);
9779
9780 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9781 /* Choose the wider of two real types. */
9782 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9783 ? type0 : type1;
9784 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9785 cmp_type = type0;
9786 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9787 cmp_type = type1;
9788
9789 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9790 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9791
9792 if (unordered_code == UNORDERED_EXPR)
9793 {
9794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9795 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9796 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9797 }
9798
9799 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9800 : ordered_code;
9801 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9802 fold_build2_loc (loc, code, type, arg0, arg1));
9803 }
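
/* For example, isgreater (x, y) folds to !(x UNLE y) when NaNs are
   honored, to !(x <= y) otherwise, and isunordered (x, y) folds to
   the constant 0 when the operand mode cannot hold a NaN.  */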
9804
9805 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9806 arithmetic if it can never overflow, or into internal functions that
9807 return both the result of the arithmetic and an overflow boolean flag
9808 in a complex integer result, or into some other check for overflow. */
9809
9810 static tree
9811 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9812 tree arg0, tree arg1, tree arg2)
9813 {
9814 enum internal_fn ifn = IFN_LAST;
9815 tree type = TREE_TYPE (TREE_TYPE (arg2));
9816 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9817 switch (fcode)
9818 {
9819 case BUILT_IN_ADD_OVERFLOW:
9820 case BUILT_IN_SADD_OVERFLOW:
9821 case BUILT_IN_SADDL_OVERFLOW:
9822 case BUILT_IN_SADDLL_OVERFLOW:
9823 case BUILT_IN_UADD_OVERFLOW:
9824 case BUILT_IN_UADDL_OVERFLOW:
9825 case BUILT_IN_UADDLL_OVERFLOW:
9826 ifn = IFN_ADD_OVERFLOW;
9827 break;
9828 case BUILT_IN_SUB_OVERFLOW:
9829 case BUILT_IN_SSUB_OVERFLOW:
9830 case BUILT_IN_SSUBL_OVERFLOW:
9831 case BUILT_IN_SSUBLL_OVERFLOW:
9832 case BUILT_IN_USUB_OVERFLOW:
9833 case BUILT_IN_USUBL_OVERFLOW:
9834 case BUILT_IN_USUBLL_OVERFLOW:
9835 ifn = IFN_SUB_OVERFLOW;
9836 break;
9837 case BUILT_IN_MUL_OVERFLOW:
9838 case BUILT_IN_SMUL_OVERFLOW:
9839 case BUILT_IN_SMULL_OVERFLOW:
9840 case BUILT_IN_SMULLL_OVERFLOW:
9841 case BUILT_IN_UMUL_OVERFLOW:
9842 case BUILT_IN_UMULL_OVERFLOW:
9843 case BUILT_IN_UMULLL_OVERFLOW:
9844 ifn = IFN_MUL_OVERFLOW;
9845 break;
9846 default:
9847 gcc_unreachable ();
9848 }
9849 tree ctype = build_complex_type (type);
9850 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9851 2, arg0, arg1);
9852 tree tgt = save_expr (call);
9853 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9854 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9855 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9856 tree store
9857 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9858 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9859 }
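
/* Illustrative sketch of the expansion: __builtin_add_overflow (a, b, &r)
   becomes roughly

     t = .ADD_OVERFLOW (a, b);      /+ complex integer result +/
     r = REALPART_EXPR (t);
     (bool) IMAGPART_EXPR (t);      /+ the overflow flag +/

   expressed as a COMPOUND_EXPR of the store and the flag.  */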
9860
9861 /* Fold a call to built-in function FNDECL with 0 arguments.
9862 IGNORE is true if the result of the function call is ignored. This
9863 function returns NULL_TREE if no simplification was possible. */
9864
9865 static tree
9866 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9867 {
9868 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9869 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9870 switch (fcode)
9871 {
9872 CASE_FLT_FN (BUILT_IN_INF):
9873 case BUILT_IN_INFD32:
9874 case BUILT_IN_INFD64:
9875 case BUILT_IN_INFD128:
9876 return fold_builtin_inf (loc, type, true);
9877
9878 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9879 return fold_builtin_inf (loc, type, false);
9880
9881 case BUILT_IN_CLASSIFY_TYPE:
9882 return fold_builtin_classify_type (NULL_TREE);
9883
9884 default:
9885 break;
9886 }
9887 return NULL_TREE;
9888 }
9889
9890 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9891 IGNORE is true if the result of the function call is ignored. This
9892 function returns NULL_TREE if no simplification was possible. */
9893
9894 static tree
9895 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9896 {
9897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9899 switch (fcode)
9900 {
9901 case BUILT_IN_CONSTANT_P:
9902 {
9903 tree val = fold_builtin_constant_p (arg0);
9904
9905 /* Gimplification will pull the CALL_EXPR for the builtin out of
9906 an if condition. When not optimizing, we'll not CSE it back.
9907 To avoid link-error regressions, fold it to zero now. */
9908 if (!val && !optimize)
9909 val = integer_zero_node;
9910
9911 return val;
9912 }
9913
9914 case BUILT_IN_CLASSIFY_TYPE:
9915 return fold_builtin_classify_type (arg0);
9916
9917 case BUILT_IN_STRLEN:
9918 return fold_builtin_strlen (loc, type, arg0);
9919
9920 CASE_FLT_FN (BUILT_IN_FABS):
9921 case BUILT_IN_FABSD32:
9922 case BUILT_IN_FABSD64:
9923 case BUILT_IN_FABSD128:
9924 return fold_builtin_fabs (loc, arg0, type);
9925
9926 case BUILT_IN_ABS:
9927 case BUILT_IN_LABS:
9928 case BUILT_IN_LLABS:
9929 case BUILT_IN_IMAXABS:
9930 return fold_builtin_abs (loc, arg0, type);
9931
9932 CASE_FLT_FN (BUILT_IN_CONJ):
9933 if (validate_arg (arg0, COMPLEX_TYPE)
9934 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9935 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9936 break;
9937
9938 CASE_FLT_FN (BUILT_IN_CREAL):
9939 if (validate_arg (arg0, COMPLEX_TYPE)
9940 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9941 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9942 break;
9943
9944 CASE_FLT_FN (BUILT_IN_CIMAG):
9945 if (validate_arg (arg0, COMPLEX_TYPE)
9946 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9947 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9948 break;
9949
9950 CASE_FLT_FN (BUILT_IN_CCOS):
9951 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9952
9953 CASE_FLT_FN (BUILT_IN_CCOSH):
9954 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9955
9956 CASE_FLT_FN (BUILT_IN_CPROJ):
9957 return fold_builtin_cproj (loc, arg0, type);
9958
9959 CASE_FLT_FN (BUILT_IN_CSIN):
9960 if (validate_arg (arg0, COMPLEX_TYPE)
9961 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9962 return do_mpc_arg1 (arg0, type, mpc_sin);
9963 break;
9964
9965 CASE_FLT_FN (BUILT_IN_CSINH):
9966 if (validate_arg (arg0, COMPLEX_TYPE)
9967 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9968 return do_mpc_arg1 (arg0, type, mpc_sinh);
9969 break;
9970
9971 CASE_FLT_FN (BUILT_IN_CTAN):
9972 if (validate_arg (arg0, COMPLEX_TYPE)
9973 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9974 return do_mpc_arg1 (arg0, type, mpc_tan);
9975 break;
9976
9977 CASE_FLT_FN (BUILT_IN_CTANH):
9978 if (validate_arg (arg0, COMPLEX_TYPE)
9979 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9980 return do_mpc_arg1 (arg0, type, mpc_tanh);
9981 break;
9982
9983 CASE_FLT_FN (BUILT_IN_CLOG):
9984 if (validate_arg (arg0, COMPLEX_TYPE)
9985 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9986 return do_mpc_arg1 (arg0, type, mpc_log);
9987 break;
9988
9989 CASE_FLT_FN (BUILT_IN_CSQRT):
9990 if (validate_arg (arg0, COMPLEX_TYPE)
9991 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9992 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9993 break;
9994
9995 CASE_FLT_FN (BUILT_IN_CASIN):
9996 if (validate_arg (arg0, COMPLEX_TYPE)
9997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9998 return do_mpc_arg1 (arg0, type, mpc_asin);
9999 break;
10000
10001 CASE_FLT_FN (BUILT_IN_CACOS):
10002 if (validate_arg (arg0, COMPLEX_TYPE)
10003 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10004 return do_mpc_arg1 (arg0, type, mpc_acos);
10005 break;
10006
10007 CASE_FLT_FN (BUILT_IN_CATAN):
10008 if (validate_arg (arg0, COMPLEX_TYPE)
10009 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10010 return do_mpc_arg1 (arg0, type, mpc_atan);
10011 break;
10012
10013 CASE_FLT_FN (BUILT_IN_CASINH):
10014 if (validate_arg (arg0, COMPLEX_TYPE)
10015 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10016 return do_mpc_arg1 (arg0, type, mpc_asinh);
10017 break;
10018
10019 CASE_FLT_FN (BUILT_IN_CACOSH):
10020 if (validate_arg (arg0, COMPLEX_TYPE)
10021 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10022 return do_mpc_arg1 (arg0, type, mpc_acosh);
10023 break;
10024
10025 CASE_FLT_FN (BUILT_IN_CATANH):
10026 if (validate_arg (arg0, COMPLEX_TYPE)
10027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10028 return do_mpc_arg1 (arg0, type, mpc_atanh);
10029 break;
10030
10031 CASE_FLT_FN (BUILT_IN_CABS):
10032 return fold_builtin_cabs (loc, arg0, type, fndecl);
10033
10034 CASE_FLT_FN (BUILT_IN_CARG):
10035 return fold_builtin_carg (loc, arg0, type);
10036
10037 CASE_FLT_FN (BUILT_IN_SQRT):
10038 return fold_builtin_sqrt (loc, arg0, type);
10039
10040 CASE_FLT_FN (BUILT_IN_CBRT):
10041 return fold_builtin_cbrt (loc, arg0, type);
10042
10043 CASE_FLT_FN (BUILT_IN_ASIN):
10044 if (validate_arg (arg0, REAL_TYPE))
10045 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10046 &dconstm1, &dconst1, true);
10047 break;
10048
10049 CASE_FLT_FN (BUILT_IN_ACOS):
10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10052 &dconstm1, &dconst1, true);
10053 break;
10054
10055 CASE_FLT_FN (BUILT_IN_ATAN):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10058 break;
10059
10060 CASE_FLT_FN (BUILT_IN_ASINH):
10061 if (validate_arg (arg0, REAL_TYPE))
10062 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10063 break;
10064
10065 CASE_FLT_FN (BUILT_IN_ACOSH):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10068 &dconst1, NULL, true);
10069 break;
10070
10071 CASE_FLT_FN (BUILT_IN_ATANH):
10072 if (validate_arg (arg0, REAL_TYPE))
10073 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10074 &dconstm1, &dconst1, false);
10075 break;
10076
10077 CASE_FLT_FN (BUILT_IN_SIN):
10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10080 break;
10081
10082 CASE_FLT_FN (BUILT_IN_COS):
10083 return fold_builtin_cos (loc, arg0, type, fndecl);
10084
10085 CASE_FLT_FN (BUILT_IN_TAN):
10086 return fold_builtin_tan (arg0, type);
10087
10088 CASE_FLT_FN (BUILT_IN_CEXP):
10089 return fold_builtin_cexp (loc, arg0, type);
10090
10091 CASE_FLT_FN (BUILT_IN_CEXPI):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10094 break;
10095
10096 CASE_FLT_FN (BUILT_IN_SINH):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10099 break;
10100
10101 CASE_FLT_FN (BUILT_IN_COSH):
10102 return fold_builtin_cosh (loc, arg0, type, fndecl);
10103
10104 CASE_FLT_FN (BUILT_IN_TANH):
10105 if (validate_arg (arg0, REAL_TYPE))
10106 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10107 break;
10108
10109 CASE_FLT_FN (BUILT_IN_ERF):
10110 if (validate_arg (arg0, REAL_TYPE))
10111 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10112 break;
10113
10114 CASE_FLT_FN (BUILT_IN_ERFC):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10117 break;
10118
10119 CASE_FLT_FN (BUILT_IN_TGAMMA):
10120 if (validate_arg (arg0, REAL_TYPE))
10121 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10122 break;
10123
10124 CASE_FLT_FN (BUILT_IN_EXP):
10125 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10126
10127 CASE_FLT_FN (BUILT_IN_EXP2):
10128 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10129
10130 CASE_FLT_FN (BUILT_IN_EXP10):
10131 CASE_FLT_FN (BUILT_IN_POW10):
10132 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10133
10134 CASE_FLT_FN (BUILT_IN_EXPM1):
10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10137 break;
10138
10139 CASE_FLT_FN (BUILT_IN_LOG):
10140 if (validate_arg (arg0, REAL_TYPE))
10141 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10142 break;
10143
10144 CASE_FLT_FN (BUILT_IN_LOG2):
10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10147 break;
10148
10149 CASE_FLT_FN (BUILT_IN_LOG10):
10150 if (validate_arg (arg0, REAL_TYPE))
10151 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10152 break;
10153
10154 CASE_FLT_FN (BUILT_IN_LOG1P):
10155 if (validate_arg (arg0, REAL_TYPE))
10156 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10157 &dconstm1, NULL, false);
10158 break;
10159
10160 CASE_FLT_FN (BUILT_IN_J0):
10161 if (validate_arg (arg0, REAL_TYPE))
10162 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10163 NULL, NULL, 0);
10164 break;
10165
10166 CASE_FLT_FN (BUILT_IN_J1):
10167 if (validate_arg (arg0, REAL_TYPE))
10168 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10169 NULL, NULL, 0);
10170 break;
10171
10172 CASE_FLT_FN (BUILT_IN_Y0):
10173 if (validate_arg (arg0, REAL_TYPE))
10174 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10175 &dconst0, NULL, false);
10176 break;
10177
10178 CASE_FLT_FN (BUILT_IN_Y1):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10181 &dconst0, NULL, false);
10182 break;
10183
10184 CASE_FLT_FN (BUILT_IN_NAN):
10185 case BUILT_IN_NAND32:
10186 case BUILT_IN_NAND64:
10187 case BUILT_IN_NAND128:
10188 return fold_builtin_nan (arg0, type, true);
10189
10190 CASE_FLT_FN (BUILT_IN_NANS):
10191 return fold_builtin_nan (arg0, type, false);
10192
10193 CASE_FLT_FN (BUILT_IN_FLOOR):
10194 return fold_builtin_floor (loc, fndecl, arg0);
10195
10196 CASE_FLT_FN (BUILT_IN_CEIL):
10197 return fold_builtin_ceil (loc, fndecl, arg0);
10198
10199 CASE_FLT_FN (BUILT_IN_TRUNC):
10200 return fold_builtin_trunc (loc, fndecl, arg0);
10201
10202 CASE_FLT_FN (BUILT_IN_ROUND):
10203 return fold_builtin_round (loc, fndecl, arg0);
10204
10205 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10206 CASE_FLT_FN (BUILT_IN_RINT):
10207 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10208
10209 CASE_FLT_FN (BUILT_IN_ICEIL):
10210 CASE_FLT_FN (BUILT_IN_LCEIL):
10211 CASE_FLT_FN (BUILT_IN_LLCEIL):
10212 CASE_FLT_FN (BUILT_IN_LFLOOR):
10213 CASE_FLT_FN (BUILT_IN_IFLOOR):
10214 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10215 CASE_FLT_FN (BUILT_IN_IROUND):
10216 CASE_FLT_FN (BUILT_IN_LROUND):
10217 CASE_FLT_FN (BUILT_IN_LLROUND):
10218 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10219
10220 CASE_FLT_FN (BUILT_IN_IRINT):
10221 CASE_FLT_FN (BUILT_IN_LRINT):
10222 CASE_FLT_FN (BUILT_IN_LLRINT):
10223 return fold_fixed_mathfn (loc, fndecl, arg0);
10224
10225 case BUILT_IN_BSWAP16:
10226 case BUILT_IN_BSWAP32:
10227 case BUILT_IN_BSWAP64:
10228 return fold_builtin_bswap (fndecl, arg0);
10229
10230 CASE_INT_FN (BUILT_IN_FFS):
10231 CASE_INT_FN (BUILT_IN_CLZ):
10232 CASE_INT_FN (BUILT_IN_CTZ):
10233 CASE_INT_FN (BUILT_IN_CLRSB):
10234 CASE_INT_FN (BUILT_IN_POPCOUNT):
10235 CASE_INT_FN (BUILT_IN_PARITY):
10236 return fold_builtin_bitop (fndecl, arg0);
10237
10238 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10239 return fold_builtin_signbit (loc, arg0, type);
10240
10241 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10242 return fold_builtin_significand (loc, arg0, type);
10243
10244 CASE_FLT_FN (BUILT_IN_ILOGB):
10245 CASE_FLT_FN (BUILT_IN_LOGB):
10246 return fold_builtin_logb (loc, arg0, type);
10247
10248 case BUILT_IN_ISASCII:
10249 return fold_builtin_isascii (loc, arg0);
10250
10251 case BUILT_IN_TOASCII:
10252 return fold_builtin_toascii (loc, arg0);
10253
10254 case BUILT_IN_ISDIGIT:
10255 return fold_builtin_isdigit (loc, arg0);
10256
10257 CASE_FLT_FN (BUILT_IN_FINITE):
10258 case BUILT_IN_FINITED32:
10259 case BUILT_IN_FINITED64:
10260 case BUILT_IN_FINITED128:
10261 case BUILT_IN_ISFINITE:
10262 {
10263 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10264 if (ret)
10265 return ret;
10266 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10267 }
10268
10269 CASE_FLT_FN (BUILT_IN_ISINF):
10270 case BUILT_IN_ISINFD32:
10271 case BUILT_IN_ISINFD64:
10272 case BUILT_IN_ISINFD128:
10273 {
10274 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10275 if (ret)
10276 return ret;
10277 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10278 }
10279
10280 case BUILT_IN_ISNORMAL:
10281 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10282
10283 case BUILT_IN_ISINF_SIGN:
10284 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10285
10286 CASE_FLT_FN (BUILT_IN_ISNAN):
10287 case BUILT_IN_ISNAND32:
10288 case BUILT_IN_ISNAND64:
10289 case BUILT_IN_ISNAND128:
10290 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10291
10292 case BUILT_IN_PRINTF:
10293 case BUILT_IN_PRINTF_UNLOCKED:
10294 case BUILT_IN_VPRINTF:
10295 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10296
10297 case BUILT_IN_FREE:
10298 if (integer_zerop (arg0))
10299 return build_empty_stmt (loc);
10300 break;
10301
10302 default:
10303 break;
10304 }
10305
10306 return NULL_TREE;
10307
10308 }
10309
10310 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10311 IGNORE is true if the result of the function call is ignored. This
10312 function returns NULL_TREE if no simplification was possible. */
10313
10314 static tree
10315 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10316 {
10317 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10318 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10319
10320 switch (fcode)
10321 {
10322 CASE_FLT_FN (BUILT_IN_JN):
10323 if (validate_arg (arg0, INTEGER_TYPE)
10324 && validate_arg (arg1, REAL_TYPE))
10325 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10326 break;
10327
10328 CASE_FLT_FN (BUILT_IN_YN):
10329 if (validate_arg (arg0, INTEGER_TYPE)
10330 && validate_arg (arg1, REAL_TYPE))
10331 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10332 &dconst0, false);
10333 break;
10334
10335 CASE_FLT_FN (BUILT_IN_DREM):
10336 CASE_FLT_FN (BUILT_IN_REMAINDER):
10337 if (validate_arg (arg0, REAL_TYPE)
10338 && validate_arg (arg1, REAL_TYPE))
10339 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10340 break;
10341
10342 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10343 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10344 if (validate_arg (arg0, REAL_TYPE)
10345 && validate_arg (arg1, POINTER_TYPE))
10346 return do_mpfr_lgamma_r (arg0, arg1, type);
10347 break;
10348
10349 CASE_FLT_FN (BUILT_IN_ATAN2):
10350 if (validate_arg (arg0, REAL_TYPE)
10351 && validate_arg (arg1, REAL_TYPE))
10352 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10353 break;
10354
10355 CASE_FLT_FN (BUILT_IN_FDIM):
10356 if (validate_arg (arg0, REAL_TYPE)
10357 && validate_arg (arg1, REAL_TYPE))
10358 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10359 break;
10360
10361 CASE_FLT_FN (BUILT_IN_HYPOT):
10362 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10363
10364 CASE_FLT_FN (BUILT_IN_CPOW):
10365 if (validate_arg (arg0, COMPLEX_TYPE)
10366 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10367 && validate_arg (arg1, COMPLEX_TYPE)
10368 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10369 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10370 break;
10371
10372 CASE_FLT_FN (BUILT_IN_LDEXP):
10373 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10374 CASE_FLT_FN (BUILT_IN_SCALBN):
10375 CASE_FLT_FN (BUILT_IN_SCALBLN):
10376 return fold_builtin_load_exponent (loc, arg0, arg1,
10377 type, /*ldexp=*/false);
10378
10379 CASE_FLT_FN (BUILT_IN_FREXP):
10380 return fold_builtin_frexp (loc, arg0, arg1, type);
10381
10382 CASE_FLT_FN (BUILT_IN_MODF):
10383 return fold_builtin_modf (loc, arg0, arg1, type);
10384
10385 case BUILT_IN_STRSTR:
10386 return fold_builtin_strstr (loc, arg0, arg1, type);
10387
10388 case BUILT_IN_STRSPN:
10389 return fold_builtin_strspn (loc, arg0, arg1);
10390
10391 case BUILT_IN_STRCSPN:
10392 return fold_builtin_strcspn (loc, arg0, arg1);
10393
10394 case BUILT_IN_STRCHR:
10395 case BUILT_IN_INDEX:
10396 return fold_builtin_strchr (loc, arg0, arg1, type);
10397
10398 case BUILT_IN_STRRCHR:
10399 case BUILT_IN_RINDEX:
10400 return fold_builtin_strrchr (loc, arg0, arg1, type);
10401
10402 case BUILT_IN_STPCPY:
10403 if (ignore)
10404 {
10405 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10406 if (!fn)
10407 break;
10408
10409 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10410 }
10411 else
10412 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10413 break;
10414
10415 case BUILT_IN_STRCMP:
10416 return fold_builtin_strcmp (loc, arg0, arg1);
10417
10418 case BUILT_IN_STRPBRK:
10419 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10420
10421 case BUILT_IN_EXPECT:
10422 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10423
10424 CASE_FLT_FN (BUILT_IN_POW):
10425 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10426
10427 CASE_FLT_FN (BUILT_IN_POWI):
10428 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10429
10430 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10431 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10432
10433 CASE_FLT_FN (BUILT_IN_FMIN):
10434 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10435
10436 CASE_FLT_FN (BUILT_IN_FMAX):
10437 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10438
10439 case BUILT_IN_ISGREATER:
10440 return fold_builtin_unordered_cmp (loc, fndecl,
10441 arg0, arg1, UNLE_EXPR, LE_EXPR);
10442 case BUILT_IN_ISGREATEREQUAL:
10443 return fold_builtin_unordered_cmp (loc, fndecl,
10444 arg0, arg1, UNLT_EXPR, LT_EXPR);
10445 case BUILT_IN_ISLESS:
10446 return fold_builtin_unordered_cmp (loc, fndecl,
10447 arg0, arg1, UNGE_EXPR, GE_EXPR);
10448 case BUILT_IN_ISLESSEQUAL:
10449 return fold_builtin_unordered_cmp (loc, fndecl,
10450 arg0, arg1, UNGT_EXPR, GT_EXPR);
10451 case BUILT_IN_ISLESSGREATER:
10452 return fold_builtin_unordered_cmp (loc, fndecl,
10453 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10454 case BUILT_IN_ISUNORDERED:
10455 return fold_builtin_unordered_cmp (loc, fndecl,
10456 arg0, arg1, UNORDERED_EXPR,
10457 NOP_EXPR);
10458
10459 /* We do the folding for va_start in the expander. */
10460 case BUILT_IN_VA_START:
10461 break;
10462
10463 case BUILT_IN_OBJECT_SIZE:
10464 return fold_builtin_object_size (arg0, arg1);
10465
10466 case BUILT_IN_PRINTF:
10467 case BUILT_IN_PRINTF_UNLOCKED:
10468 case BUILT_IN_VPRINTF:
10469 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10470
10471 case BUILT_IN_PRINTF_CHK:
10472 case BUILT_IN_VPRINTF_CHK:
10473 if (!validate_arg (arg0, INTEGER_TYPE)
10474 || TREE_SIDE_EFFECTS (arg0))
10475 return NULL_TREE;
10476 else
10477 return fold_builtin_printf (loc, fndecl,
10478 arg1, NULL_TREE, ignore, fcode);
10479 break;
10480
10481 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10482 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10483
10484 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10485 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10486
10487 default:
10488 break;
10489 }
10490 return NULL_TREE;
10491 }
10492
10493 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10494 and ARG2. IGNORE is true if the result of the function call is ignored.
10495 This function returns NULL_TREE if no simplification was possible. */
10496
10497 static tree
10498 fold_builtin_3 (location_t loc, tree fndecl,
10499 tree arg0, tree arg1, tree arg2, bool ignore)
10500 {
10501 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10502 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10503 switch (fcode)
10504 {
10505
10506 CASE_FLT_FN (BUILT_IN_SINCOS):
10507 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10508
10509 CASE_FLT_FN (BUILT_IN_FMA):
10510 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10512
10513 CASE_FLT_FN (BUILT_IN_REMQUO):
10514 if (validate_arg (arg0, REAL_TYPE)
10515 && validate_arg (arg1, REAL_TYPE)
10516 && validate_arg (arg2, POINTER_TYPE))
10517 return do_mpfr_remquo (arg0, arg1, arg2);
10518 break;
10519
10520 case BUILT_IN_STRNCAT:
10521 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10522
10523 case BUILT_IN_STRNCMP:
10524 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10525
10526 case BUILT_IN_MEMCHR:
10527 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10528
10529 case BUILT_IN_BCMP:
10530 case BUILT_IN_MEMCMP:
10531 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10532
10533 case BUILT_IN_PRINTF_CHK:
10534 case BUILT_IN_VPRINTF_CHK:
10535 if (!validate_arg (arg0, INTEGER_TYPE)
10536 || TREE_SIDE_EFFECTS (arg0))
10537 return NULL_TREE;
10538 else
10539 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10540 break;
10541
10542 case BUILT_IN_EXPECT:
10543 return fold_builtin_expect (loc, arg0, arg1, arg2);
10544
10545 case BUILT_IN_ADD_OVERFLOW:
10546 case BUILT_IN_SUB_OVERFLOW:
10547 case BUILT_IN_MUL_OVERFLOW:
10548 case BUILT_IN_SADD_OVERFLOW:
10549 case BUILT_IN_SADDL_OVERFLOW:
10550 case BUILT_IN_SADDLL_OVERFLOW:
10551 case BUILT_IN_SSUB_OVERFLOW:
10552 case BUILT_IN_SSUBL_OVERFLOW:
10553 case BUILT_IN_SSUBLL_OVERFLOW:
10554 case BUILT_IN_SMUL_OVERFLOW:
10555 case BUILT_IN_SMULL_OVERFLOW:
10556 case BUILT_IN_SMULLL_OVERFLOW:
10557 case BUILT_IN_UADD_OVERFLOW:
10558 case BUILT_IN_UADDL_OVERFLOW:
10559 case BUILT_IN_UADDLL_OVERFLOW:
10560 case BUILT_IN_USUB_OVERFLOW:
10561 case BUILT_IN_USUBL_OVERFLOW:
10562 case BUILT_IN_USUBLL_OVERFLOW:
10563 case BUILT_IN_UMUL_OVERFLOW:
10564 case BUILT_IN_UMULL_OVERFLOW:
10565 case BUILT_IN_UMULLL_OVERFLOW:
10566 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10567
10568 default:
10569 break;
10570 }
10571 return NULL_TREE;
10572 }
10573
10574 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10575 arguments. IGNORE is true if the result of the
10576 function call is ignored. This function returns NULL_TREE if no
10577 simplification was possible. */
10578
10579 tree
10580 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10581 {
10582 tree ret = NULL_TREE;
10583
10584 switch (nargs)
10585 {
10586 case 0:
10587 ret = fold_builtin_0 (loc, fndecl, ignore);
10588 break;
10589 case 1:
10590 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10591 break;
10592 case 2:
10593 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10594 break;
10595 case 3:
10596 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10597 break;
10598 default:
10599 ret = fold_builtin_varargs (loc, fndecl, args, nargs, ignore);
10600 break;
10601 }
10602 if (ret)
10603 {
10604 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10605 SET_EXPR_LOCATION (ret, loc);
10606 TREE_NO_WARNING (ret) = 1;
10607 return ret;
10608 }
10609 return NULL_TREE;
10610 }
10611
10612 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10613 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10614 of arguments in ARGS to be omitted. OLDNARGS is the number of
10615 elements in ARGS. */
10616
10617 static tree
10618 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10619 int skip, tree fndecl, int n, va_list newargs)
10620 {
10621 int nargs = oldnargs - skip + n;
10622 tree *buffer;
10623
10624 if (n > 0)
10625 {
10626 int i, j;
10627
10628 buffer = XALLOCAVEC (tree, nargs);
10629 for (i = 0; i < n; i++)
10630 buffer[i] = va_arg (newargs, tree);
10631 for (j = skip; j < oldnargs; j++, i++)
10632 buffer[i] = args[j];
10633 }
10634 else
10635 buffer = args + skip;
10636
10637 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10638 }
10639
10640 /* Return true if FNDECL shouldn't be folded right now.
10641 If a built-in function has an always_inline attribute wrapper,
10642 defer folding it until after always_inline functions have been
10643 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking might not be
10644 performed. */
10645
10646 bool
10647 avoid_folding_inline_builtin (tree fndecl)
10648 {
10649 return (DECL_DECLARED_INLINE_P (fndecl)
10650 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10651 && cfun
10652 && !cfun->always_inline_functions_inlined
10653 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10654 }
10655
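/* Illustrative sketch (editorial example, not GCC code): the kind of
   -D_FORTIFY_SOURCE wrapper this predicate protects looks roughly like

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the strcpy call before this wrapper is inlined would bypass
   the object-size check, hence the deferral above.  */
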
10656 /* A wrapper function for builtin folding that prevents warnings for
10657 "statement without effect" and the like, caused by removing the
10658 call node earlier than the warning is generated. */
10659
10660 tree
10661 fold_call_expr (location_t loc, tree exp, bool ignore)
10662 {
10663 tree ret = NULL_TREE;
10664 tree fndecl = get_callee_fndecl (exp);
10665 if (fndecl
10666 && TREE_CODE (fndecl) == FUNCTION_DECL
10667 && DECL_BUILT_IN (fndecl)
10668 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10669 yet. Defer folding until we see all the arguments
10670 (after inlining). */
10671 && !CALL_EXPR_VA_ARG_PACK (exp))
10672 {
10673 int nargs = call_expr_nargs (exp);
10674
10675 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10676 instead the last argument is __builtin_va_arg_pack (). Defer
10677 folding even in that case, until the arguments are finalized. */
10678 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10679 {
10680 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10681 if (fndecl2
10682 && TREE_CODE (fndecl2) == FUNCTION_DECL
10683 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10684 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10685 return NULL_TREE;
10686 }
10687
10688 if (avoid_folding_inline_builtin (fndecl))
10689 return NULL_TREE;
10690
10691 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10692 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10693 CALL_EXPR_ARGP (exp), ignore);
10694 else
10695 {
10696 tree *args = CALL_EXPR_ARGP (exp);
10697 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10698 if (ret)
10699 return ret;
10700 }
10701 }
10702 return NULL_TREE;
10703 }
10704
10705 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10706 N arguments are passed in the array ARGARRAY. Return a folded
10707 expression or NULL_TREE if no simplification was possible. */
10708
10709 tree
10710 fold_builtin_call_array (location_t loc, tree,
10711 tree fn,
10712 int n,
10713 tree *argarray)
10714 {
10715 if (TREE_CODE (fn) != ADDR_EXPR)
10716 return NULL_TREE;
10717
10718 tree fndecl = TREE_OPERAND (fn, 0);
10719 if (TREE_CODE (fndecl) == FUNCTION_DECL
10720 && DECL_BUILT_IN (fndecl))
10721 {
10722 /* If the last argument is __builtin_va_arg_pack (), the arguments to
10723 this function are not finalized yet. Defer folding until they are. */
10724 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10725 {
10726 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10727 if (fndecl2
10728 && TREE_CODE (fndecl2) == FUNCTION_DECL
10729 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10730 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10731 return NULL_TREE;
10732 }
10733 if (avoid_folding_inline_builtin (fndecl))
10734 return NULL_TREE;
10735 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10736 return targetm.fold_builtin (fndecl, n, argarray, false);
10737 else
10738 return fold_builtin_n (loc, fndecl, argarray, n, false);
10739 }
10740
10741 return NULL_TREE;
10742 }
10743
10744 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10745 along with N new arguments specified as the "..." parameters. SKIP
10746 is the number of arguments in EXP to be omitted. This function is used
10747 to do varargs-to-varargs transformations. */
10748
10749 static tree
10750 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10751 {
10752 va_list ap;
10753 tree t;
10754
10755 va_start (ap, n);
10756 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10757 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10758 va_end (ap);
10759
10760 return t;
10761 }
10762
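/* Illustrative use (editorial sketch): a checked call can be rewritten
   into its unchecked form, e.g. folding

     __sprintf_chk (dest, flag, size, fmt, args...)

   into sprintf (dest, fmt, args...) via something like

     rewrite_call_expr (loc, exp, 4, sprintf_decl, 2, dest, fmt);

   which drops the four original leading arguments and pushes DEST and
   FMT back in front of the remaining tail.  SPRINTF_DECL here is a
   placeholder name for the sprintf builtin decl.  */
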
10763 /* Validate a single argument ARG against a tree code CODE representing
10764 a type. */
10765
10766 static bool
10767 validate_arg (const_tree arg, enum tree_code code)
10768 {
10769 if (!arg)
10770 return false;
10771 else if (code == POINTER_TYPE)
10772 return POINTER_TYPE_P (TREE_TYPE (arg));
10773 else if (code == INTEGER_TYPE)
10774 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10775 return code == TREE_CODE (TREE_TYPE (arg));
10776 }
10777
10778 /* This function validates the types of a function call argument list
10779 against a specified list of tree_codes. If the last specifier is a 0,
10780 that represents an ellipsis; otherwise the last specifier must be a
10781 VOID_TYPE.
10782
10783 This is the GIMPLE version of validate_arglist. Eventually we want to
10784 completely convert builtins.c to work from GIMPLE and the tree based
10785 validate_arglist will then be removed. */
10786
10787 bool
10788 validate_gimple_arglist (const gcall *call, ...)
10789 {
10790 enum tree_code code;
10791 bool res = false;
10792 va_list ap;
10793 const_tree arg;
10794 size_t i;
10795
10796 va_start (ap, call);
10797 i = 0;
10798
10799 do
10800 {
10801 code = (enum tree_code) va_arg (ap, int);
10802 switch (code)
10803 {
10804 case 0:
10805 /* This signifies an ellipsis; any further arguments are all ok. */
10806 res = true;
10807 goto end;
10808 case VOID_TYPE:
10809 /* This signifies an endlink; if no arguments remain, return
10810 true, otherwise return false. */
10811 res = (i == gimple_call_num_args (call));
10812 goto end;
10813 default:
10814 /* If no parameters remain or the parameter's code does not
10815 match the specified code, return false. Otherwise continue
10816 checking any remaining arguments. */
10817 arg = gimple_call_arg (call, i++);
10818 if (!validate_arg (arg, code))
10819 goto end;
10820 break;
10821 }
10822 }
10823 while (1);
10824
10825 /* We need the gotos here so that every exit path reaches the
10826 single va_end call below. */
10827 end: ;
10828 va_end (ap);
10829
10830 return res;
10831 }
10832
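/* Usage sketch (editorial example): a memcpy-like expander would check
   its call along the lines of

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return false;

   where the trailing VOID_TYPE marks the end of the expected list.  */
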
10833 /* Default target-specific builtin expander that does nothing. */
10834
10835 rtx
10836 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10837 rtx target ATTRIBUTE_UNUSED,
10838 rtx subtarget ATTRIBUTE_UNUSED,
10839 machine_mode mode ATTRIBUTE_UNUSED,
10840 int ignore ATTRIBUTE_UNUSED)
10841 {
10842 return NULL_RTX;
10843 }
10844
10845 /* Returns true if EXP represents data that would potentially reside
10846 in a readonly section. */
10847
10848 bool
10849 readonly_data_expr (tree exp)
10850 {
10851 STRIP_NOPS (exp);
10852
10853 if (TREE_CODE (exp) != ADDR_EXPR)
10854 return false;
10855
10856 exp = get_base_address (TREE_OPERAND (exp, 0));
10857 if (!exp)
10858 return false;
10859
10860 /* Make sure we call decl_readonly_section only for trees it
10861 can handle (since it returns true for everything it doesn't
10862 understand). */
10863 if (TREE_CODE (exp) == STRING_CST
10864 || TREE_CODE (exp) == CONSTRUCTOR
10865 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10866 return decl_readonly_section (exp, 0);
10867 else
10868 return false;
10869 }
10870
10871 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10872 to the call, and TYPE is its return type.
10873
10874 Return NULL_TREE if no simplification was possible, otherwise return the
10875 simplified form of the call as a tree.
10876
10877 The simplified form may be a constant or other expression which
10878 computes the same value, but in a more efficient manner (including
10879 calls to other builtin functions).
10880
10881 The call may contain arguments which need to be evaluated, but
10882 which are not useful to determine the result of the call. In
10883 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10884 COMPOUND_EXPR will be an argument which must be evaluated.
10885 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10886 COMPOUND_EXPR in the chain will contain the tree for the simplified
10887 form of the builtin function call. */
10888
10889 static tree
10890 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10891 {
10892 if (!validate_arg (s1, POINTER_TYPE)
10893 || !validate_arg (s2, POINTER_TYPE))
10894 return NULL_TREE;
10895 else
10896 {
10897 tree fn;
10898 const char *p1, *p2;
10899
10900 p2 = c_getstr (s2);
10901 if (p2 == NULL)
10902 return NULL_TREE;
10903
10904 p1 = c_getstr (s1);
10905 if (p1 != NULL)
10906 {
10907 const char *r = strstr (p1, p2);
10908 tree tem;
10909
10910 if (r == NULL)
10911 return build_int_cst (TREE_TYPE (s1), 0);
10912
10913 /* Return an offset into the constant string argument. */
10914 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10915 return fold_convert_loc (loc, type, tem);
10916 }
10917
10918 /* The argument is const char *, and the result is char *, so we need
10919 a type conversion here to avoid a warning. */
10920 if (p2[0] == '\0')
10921 return fold_convert_loc (loc, type, s1);
10922
10923 if (p2[1] != '\0')
10924 return NULL_TREE;
10925
10926 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10927 if (!fn)
10928 return NULL_TREE;
10929
10930 /* New argument list transforming strstr(s1, s2) to
10931 strchr(s1, s2[0]). */
10932 return build_call_expr_loc (loc, fn, 2, s1,
10933 build_int_cst (integer_type_node, p2[0]));
10934 }
10935 }
10936
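/* Illustrative foldings performed above (editorial sketch):

     strstr ("hello", "ll")  -->  "hello" + 2
     strstr (s, "")          -->  (char *) s
     strstr (s, "c")         -->  strchr (s, 'c')

   A multi-character needle with a non-constant haystack keeps the
   library call.  */
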
10937 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10938 the call, and TYPE is its return type.
10939
10940 Return NULL_TREE if no simplification was possible, otherwise return the
10941 simplified form of the call as a tree.
10942
10943 The simplified form may be a constant or other expression which
10944 computes the same value, but in a more efficient manner (including
10945 calls to other builtin functions).
10946
10947 The call may contain arguments which need to be evaluated, but
10948 which are not useful to determine the result of the call. In
10949 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10950 COMPOUND_EXPR will be an argument which must be evaluated.
10951 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10952 COMPOUND_EXPR in the chain will contain the tree for the simplified
10953 form of the builtin function call. */
10954
10955 static tree
10956 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10957 {
10958 if (!validate_arg (s1, POINTER_TYPE)
10959 || !validate_arg (s2, INTEGER_TYPE))
10960 return NULL_TREE;
10961 else
10962 {
10963 const char *p1;
10964
10965 if (TREE_CODE (s2) != INTEGER_CST)
10966 return NULL_TREE;
10967
10968 p1 = c_getstr (s1);
10969 if (p1 != NULL)
10970 {
10971 char c;
10972 const char *r;
10973 tree tem;
10974
10975 if (target_char_cast (s2, &c))
10976 return NULL_TREE;
10977
10978 r = strchr (p1, c);
10979
10980 if (r == NULL)
10981 return build_int_cst (TREE_TYPE (s1), 0);
10982
10983 /* Return an offset into the constant string argument. */
10984 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10985 return fold_convert_loc (loc, type, tem);
10986 }
10987 return NULL_TREE;
10988 }
10989 }
10990
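/* For instance (editorial sketch), with a constant string argument:

     strchr ("hello", 'l')  -->  "hello" + 2
     strchr ("hello", 'z')  -->  (char *) 0

   A non-constant string is left for the library or later passes.  */
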
10991 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10992 the call, and TYPE is its return type.
10993
10994 Return NULL_TREE if no simplification was possible, otherwise return the
10995 simplified form of the call as a tree.
10996
10997 The simplified form may be a constant or other expression which
10998 computes the same value, but in a more efficient manner (including
10999 calls to other builtin functions).
11000
11001 The call may contain arguments which need to be evaluated, but
11002 which are not useful to determine the result of the call. In
11003 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11004 COMPOUND_EXPR will be an argument which must be evaluated.
11005 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11006 COMPOUND_EXPR in the chain will contain the tree for the simplified
11007 form of the builtin function call. */
11008
11009 static tree
11010 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11011 {
11012 if (!validate_arg (s1, POINTER_TYPE)
11013 || !validate_arg (s2, INTEGER_TYPE))
11014 return NULL_TREE;
11015 else
11016 {
11017 tree fn;
11018 const char *p1;
11019
11020 if (TREE_CODE (s2) != INTEGER_CST)
11021 return NULL_TREE;
11022
11023 p1 = c_getstr (s1);
11024 if (p1 != NULL)
11025 {
11026 char c;
11027 const char *r;
11028 tree tem;
11029
11030 if (target_char_cast (s2, &c))
11031 return NULL_TREE;
11032
11033 r = strrchr (p1, c);
11034
11035 if (r == NULL)
11036 return build_int_cst (TREE_TYPE (s1), 0);
11037
11038 /* Return an offset into the constant string argument. */
11039 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11040 return fold_convert_loc (loc, type, tem);
11041 }
11042
11043 if (! integer_zerop (s2))
11044 return NULL_TREE;
11045
11046 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11047 if (!fn)
11048 return NULL_TREE;
11049
11050 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11051 return build_call_expr_loc (loc, fn, 2, s1, s2);
11052 }
11053 }
11054
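/* For instance (editorial sketch):

     strrchr ("hello", 'l')  -->  "hello" + 3
     strrchr (s, '\0')       -->  strchr (s, '\0')

   the latter because the terminating NUL occurs exactly once, so
   searching from either end finds the same character.  */
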
11055 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11056 to the call, and TYPE is its return type.
11057
11058 Return NULL_TREE if no simplification was possible, otherwise return the
11059 simplified form of the call as a tree.
11060
11061 The simplified form may be a constant or other expression which
11062 computes the same value, but in a more efficient manner (including
11063 calls to other builtin functions).
11064
11065 The call may contain arguments which need to be evaluated, but
11066 which are not useful to determine the result of the call. In
11067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11068 COMPOUND_EXPR will be an argument which must be evaluated.
11069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11070 COMPOUND_EXPR in the chain will contain the tree for the simplified
11071 form of the builtin function call. */
11072
11073 static tree
11074 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11075 {
11076 if (!validate_arg (s1, POINTER_TYPE)
11077 || !validate_arg (s2, POINTER_TYPE))
11078 return NULL_TREE;
11079 else
11080 {
11081 tree fn;
11082 const char *p1, *p2;
11083
11084 p2 = c_getstr (s2);
11085 if (p2 == NULL)
11086 return NULL_TREE;
11087
11088 p1 = c_getstr (s1);
11089 if (p1 != NULL)
11090 {
11091 const char *r = strpbrk (p1, p2);
11092 tree tem;
11093
11094 if (r == NULL)
11095 return build_int_cst (TREE_TYPE (s1), 0);
11096
11097 /* Return an offset into the constant string argument. */
11098 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11099 return fold_convert_loc (loc, type, tem);
11100 }
11101
11102 if (p2[0] == '\0')
11103 /* strpbrk(x, "") == NULL.
11104 Evaluate and ignore s1 in case it had side-effects. */
11105 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11106
11107 if (p2[1] != '\0')
11108 return NULL_TREE; /* Really call strpbrk. */
11109
11110 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11111 if (!fn)
11112 return NULL_TREE;
11113
11114 /* New argument list transforming strpbrk(s1, s2) to
11115 strchr(s1, s2[0]). */
11116 return build_call_expr_loc (loc, fn, 2, s1,
11117 build_int_cst (integer_type_node, p2[0]));
11118 }
11119 }
11120
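/* For instance (editorial sketch):

     strpbrk (s, "")       -->  (char *) 0, with S still evaluated
     strpbrk (s, "c")      -->  strchr (s, 'c')
     strpbrk ("ab", "ba")  -->  "ab" + 0

   A multi-character non-constant S2 really calls strpbrk.  */
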
11121 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11122 arguments to the call.
11123
11124 Return NULL_TREE if no simplification was possible, otherwise return the
11125 simplified form of the call as a tree.
11126
11127 The simplified form may be a constant or other expression which
11128 computes the same value, but in a more efficient manner (including
11129 calls to other builtin functions).
11130
11131 The call may contain arguments which need to be evaluated, but
11132 which are not useful to determine the result of the call. In
11133 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11134 COMPOUND_EXPR will be an argument which must be evaluated.
11135 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11136 COMPOUND_EXPR in the chain will contain the tree for the simplified
11137 form of the builtin function call. */
11138
11139 static tree
11140 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11141 {
11142 if (!validate_arg (dst, POINTER_TYPE)
11143 || !validate_arg (src, POINTER_TYPE)
11144 || !validate_arg (len, INTEGER_TYPE))
11145 return NULL_TREE;
11146 else
11147 {
11148 const char *p = c_getstr (src);
11149
11150 /* If the requested length is zero, or the src parameter string
11151 length is zero, return the dst parameter. */
11152 if (integer_zerop (len) || (p && *p == '\0'))
11153 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11154
11155 /* If the requested length is greater than or equal to the source
11156 string length, call strcat. */
11157 if (TREE_CODE (len) == INTEGER_CST && p
11158 && compare_tree_int (len, strlen (p)) >= 0)
11159 {
11160 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11161
11162 /* If the replacement _DECL isn't initialized, don't do the
11163 transformation. */
11164 if (!fn)
11165 return NULL_TREE;
11166
11167 return build_call_expr_loc (loc, fn, 2, dst, src);
11168 }
11169 return NULL_TREE;
11170 }
11171 }
11172
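/* For instance (editorial sketch):

     strncat (dst, src, 0)   -->  dst
     strncat (dst, "ab", 7)  -->  strcat (dst, "ab")

   since a bound of at least strlen (src) makes the bounded and
   unbounded calls behave identically.  */
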
11173 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11174 to the call.
11175
11176 Return NULL_TREE if no simplification was possible, otherwise return the
11177 simplified form of the call as a tree.
11178
11179 The simplified form may be a constant or other expression which
11180 computes the same value, but in a more efficient manner (including
11181 calls to other builtin functions).
11182
11183 The call may contain arguments which need to be evaluated, but
11184 which are not useful to determine the result of the call. In
11185 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11186 COMPOUND_EXPR will be an argument which must be evaluated.
11187 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11188 COMPOUND_EXPR in the chain will contain the tree for the simplified
11189 form of the builtin function call. */
11190
11191 static tree
11192 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11193 {
11194 if (!validate_arg (s1, POINTER_TYPE)
11195 || !validate_arg (s2, POINTER_TYPE))
11196 return NULL_TREE;
11197 else
11198 {
11199 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11200
11201 /* If both arguments are constants, evaluate at compile-time. */
11202 if (p1 && p2)
11203 {
11204 const size_t r = strspn (p1, p2);
11205 return build_int_cst (size_type_node, r);
11206 }
11207
11208 /* If either argument is "", the result is 0. */
11209 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11210 /* Evaluate and ignore both arguments in case either one has
11211 side-effects. */
11212 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11213 s1, s2);
11214 return NULL_TREE;
11215 }
11216 }
11217
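/* For instance (editorial sketch):

     strspn ("abcba", "ab")  -->  2   (evaluated at compile time)
     strspn (s, "")          -->  0   (arguments still evaluated)  */
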
11218 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11219 to the call.
11220
11221 Return NULL_TREE if no simplification was possible, otherwise return the
11222 simplified form of the call as a tree.
11223
11224 The simplified form may be a constant or other expression which
11225 computes the same value, but in a more efficient manner (including
11226 calls to other builtin functions).
11227
11228 The call may contain arguments which need to be evaluated, but
11229 which are not useful to determine the result of the call. In
11230 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11231 COMPOUND_EXPR will be an argument which must be evaluated.
11232 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11233 COMPOUND_EXPR in the chain will contain the tree for the simplified
11234 form of the builtin function call. */
11235
11236 static tree
11237 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11238 {
11239 if (!validate_arg (s1, POINTER_TYPE)
11240 || !validate_arg (s2, POINTER_TYPE))
11241 return NULL_TREE;
11242 else
11243 {
11244 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11245
11246 /* If both arguments are constants, evaluate at compile-time. */
11247 if (p1 && p2)
11248 {
11249 const size_t r = strcspn (p1, p2);
11250 return build_int_cst (size_type_node, r);
11251 }
11252
11253 /* If the first argument is "", the result is 0. */
11254 if (p1 && *p1 == '\0')
11255 {
11256 /* Evaluate and ignore argument s2 in case it has
11257 side-effects. */
11258 return omit_one_operand_loc (loc, size_type_node,
11259 size_zero_node, s2);
11260 }
11261
11262 /* If the second argument is "", return __builtin_strlen(s1). */
11263 if (p2 && *p2 == '\0')
11264 {
11265 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11266
11267 /* If the replacement _DECL isn't initialized, don't do the
11268 transformation. */
11269 if (!fn)
11270 return NULL_TREE;
11271
11272 return build_call_expr_loc (loc, fn, 1, s1);
11273 }
11274 return NULL_TREE;
11275 }
11276 }
11277
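/* For instance (editorial sketch) of the strcspn folding above:

     strcspn ("abcba", "c")  -->  2   (evaluated at compile time)
     strcspn (s, "")         -->  strlen (s)
     strcspn ("", s2)        -->  0   (S2 still evaluated)  */
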
11278 /* Fold the next_arg or va_start call EXP. Return true if an error
11279 was produced, false otherwise. This is done so that we don't output
11280 the error or warning more than once. */
11281
11282 bool
11283 fold_builtin_next_arg (tree exp, bool va_start_p)
11284 {
11285 tree fntype = TREE_TYPE (current_function_decl);
11286 int nargs = call_expr_nargs (exp);
11287 tree arg;
11288 /* There is a good chance the current input_location points inside the
11289 definition of the va_start macro (perhaps on the token for the
11290 builtin) in a system header, so warnings would not be emitted there.
11291 Use the location in real source code instead. */
11292 source_location current_location =
11293 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11294 NULL);
11295
11296 if (!stdarg_p (fntype))
11297 {
11298 error ("%<va_start%> used in function with fixed args");
11299 return true;
11300 }
11301
11302 if (va_start_p)
11303 {
11304 if (nargs != 2)
11305 {
11306 error ("wrong number of arguments to function %<va_start%>");
11307 return true;
11308 }
11309 arg = CALL_EXPR_ARG (exp, 1);
11310 }
11311 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11312 we have checked the arguments and, if needed, issued a warning. */
11313 else
11314 {
11315 if (nargs == 0)
11316 {
11317 /* Evidently an out of date version of <stdarg.h>; can't validate
11318 va_start's second argument, but can still work as intended. */
11319 warning_at (current_location,
11320 OPT_Wvarargs,
11321 "%<__builtin_next_arg%> called without an argument");
11322 return true;
11323 }
11324 else if (nargs > 1)
11325 {
11326 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11327 return true;
11328 }
11329 arg = CALL_EXPR_ARG (exp, 0);
11330 }
11331
11332 if (TREE_CODE (arg) == SSA_NAME)
11333 arg = SSA_NAME_VAR (arg);
11334
11335 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11336 or __builtin_next_arg (0) the first time we see it, after checking
11337 the arguments and if needed issuing a warning. */
11338 if (!integer_zerop (arg))
11339 {
11340 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11341
11342 /* Strip off all nops for the sake of the comparison. This
11343 is not quite the same as STRIP_NOPS. It does more.
11344 We must also strip off INDIRECT_REFs for C++ reference
11345 parameters. */
11346 while (CONVERT_EXPR_P (arg)
11347 || TREE_CODE (arg) == INDIRECT_REF)
11348 arg = TREE_OPERAND (arg, 0);
11349 if (arg != last_parm)
11350 {
11351 /* FIXME: Sometimes the tree optimizers hand us something other
11352 than the last named argument even though the user did pass the
11353 last argument. We only warn in that case, so wrong code may
11354 still be generated because of it. */
11356 warning_at (current_location,
11357 OPT_Wvarargs,
11358 "second parameter of %<va_start%> not last named argument");
11359 }
11360
11361 /* Undefined by C99 7.15.1.4p4 (va_start):
11362 "If the parameter parmN is declared with the register storage
11363 class, with a function or array type, or with a type that is
11364 not compatible with the type that results after application of
11365 the default argument promotions, the behavior is undefined."
11366 */
11367 else if (DECL_REGISTER (arg))
11368 {
11369 warning_at (current_location,
11370 OPT_Wvarargs,
11371 "undefined behaviour when second parameter of "
11372 "%<va_start%> is declared with %<register%> storage");
11373 }
11374
11375 /* We want to verify the second parameter just once before the tree
11376 optimizers are run and then avoid keeping it in the tree,
11377 as otherwise we could warn even for correct code like:
11378 void foo (int i, ...)
11379 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11380 if (va_start_p)
11381 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11382 else
11383 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11384 }
11385 return false;
11386 }
11387
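/* Illustrative misuse (editorial example) caught by the checks above:

     void
     foo (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   A is not the last named parameter, so the -Wvarargs warning fires;
   va_start (ap, b) is the only well-defined form here.  */
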
11388
11389 /* Expand a call EXP to __builtin_object_size. */
11390
11391 static rtx
11392 expand_builtin_object_size (tree exp)
11393 {
11394 tree ost;
11395 int object_size_type;
11396 tree fndecl = get_callee_fndecl (exp);
11397
11398 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11399 {
11400 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11401 exp, fndecl);
11402 expand_builtin_trap ();
11403 return const0_rtx;
11404 }
11405
11406 ost = CALL_EXPR_ARG (exp, 1);
11407 STRIP_NOPS (ost);
11408
11409 if (TREE_CODE (ost) != INTEGER_CST
11410 || tree_int_cst_sgn (ost) < 0
11411 || compare_tree_int (ost, 3) > 0)
11412 {
11413 error ("%Klast argument of %D is not integer constant between 0 and 3",
11414 exp, fndecl);
11415 expand_builtin_trap ();
11416 return const0_rtx;
11417 }
11418
11419 object_size_type = tree_to_shwi (ost);
11420
11421 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11422 }
11423
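/* For instance (editorial sketch): a call that survives folding to this
   point refers to an unknown object, so the expansion yields the
   documented "unknown" values:

     __builtin_object_size (p, 0)  -->  (size_t) -1
     __builtin_object_size (p, 2)  -->  (size_t) 0

   matching the constm1_rtx / const0_rtx results above.  */
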
11424 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11425 FCODE is the BUILT_IN_* to use.
11426 Return NULL_RTX if we failed; the caller should emit a normal call,
11427 otherwise try to get the result in TARGET, if convenient (and in
11428 mode MODE if that's convenient). */
11429
11430 static rtx
11431 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11432 enum built_in_function fcode)
11433 {
11434 tree dest, src, len, size;
11435
11436 if (!validate_arglist (exp,
11437 POINTER_TYPE,
11438 fcode == BUILT_IN_MEMSET_CHK
11439 ? INTEGER_TYPE : POINTER_TYPE,
11440 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11441 return NULL_RTX;
11442
11443 dest = CALL_EXPR_ARG (exp, 0);
11444 src = CALL_EXPR_ARG (exp, 1);
11445 len = CALL_EXPR_ARG (exp, 2);
11446 size = CALL_EXPR_ARG (exp, 3);
11447
11448 if (! tree_fits_uhwi_p (size))
11449 return NULL_RTX;
11450
11451 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11452 {
11453 tree fn;
11454
11455 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11456 {
11457 warning_at (tree_nonartificial_location (exp),
11458 0, "%Kcall to %D will always overflow destination buffer",
11459 exp, get_callee_fndecl (exp));
11460 return NULL_RTX;
11461 }
11462
11463 fn = NULL_TREE;
11464 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11465 mem{cpy,pcpy,move,set} is available. */
11466 switch (fcode)
11467 {
11468 case BUILT_IN_MEMCPY_CHK:
11469 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11470 break;
11471 case BUILT_IN_MEMPCPY_CHK:
11472 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11473 break;
11474 case BUILT_IN_MEMMOVE_CHK:
11475 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11476 break;
11477 case BUILT_IN_MEMSET_CHK:
11478 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11479 break;
11480 default:
11481 break;
11482 }
11483
11484 if (! fn)
11485 return NULL_RTX;
11486
11487 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11488 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11489 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11490 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11491 }
11492 else if (fcode == BUILT_IN_MEMSET_CHK)
11493 return NULL_RTX;
11494 else
11495 {
11496 unsigned int dest_align = get_pointer_alignment (dest);
11497
11498 /* If DEST is not a pointer type, call the normal function. */
11499 if (dest_align == 0)
11500 return NULL_RTX;
11501
11502 /* If SRC and DEST are the same (and not volatile), do nothing. */
11503 if (operand_equal_p (src, dest, 0))
11504 {
11505 tree expr;
11506
11507 if (fcode != BUILT_IN_MEMPCPY_CHK)
11508 {
11509 /* Evaluate and ignore LEN in case it has side-effects. */
11510 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11511 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11512 }
11513
11514 expr = fold_build_pointer_plus (dest, len);
11515 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11516 }
11517
11518 /* __memmove_chk special case. */
11519 if (fcode == BUILT_IN_MEMMOVE_CHK)
11520 {
11521 unsigned int src_align = get_pointer_alignment (src);
11522
11523 if (src_align == 0)
11524 return NULL_RTX;
11525
11526 /* If src is categorized for a readonly section we can use
11527 normal __memcpy_chk. */
11528 if (readonly_data_expr (src))
11529 {
11530 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11531 if (!fn)
11532 return NULL_RTX;
11533 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11534 dest, src, len, size);
11535 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11536 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11537 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11538 }
11539 }
11540 return NULL_RTX;
11541 }
11542 }
11543
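/* For instance (editorial sketch): with both sizes known and safe,

     __memcpy_chk (d, s, 32, 64)   -->  memcpy (d, s, 32)

   while a provable overflow such as __memcpy_chk (d, s, 128, 64) is
   warned about and left as a call so the runtime check still fires.  */
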
11544 /* Emit warning if a buffer overflow is detected at compile time. */
11545
11546 static void
11547 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11548 {
11549 int is_strlen = 0;
11550 tree len, size;
11551 location_t loc = tree_nonartificial_location (exp);
11552
11553 switch (fcode)
11554 {
11555 case BUILT_IN_STRCPY_CHK:
11556 case BUILT_IN_STPCPY_CHK:
11557 /* For __strcat_chk the warning will be emitted only if overflowing
11558 by at least strlen (dest) + 1 bytes. */
11559 case BUILT_IN_STRCAT_CHK:
11560 len = CALL_EXPR_ARG (exp, 1);
11561 size = CALL_EXPR_ARG (exp, 2);
11562 is_strlen = 1;
11563 break;
11564 case BUILT_IN_STRNCAT_CHK:
11565 case BUILT_IN_STRNCPY_CHK:
11566 case BUILT_IN_STPNCPY_CHK:
11567 len = CALL_EXPR_ARG (exp, 2);
11568 size = CALL_EXPR_ARG (exp, 3);
11569 break;
11570 case BUILT_IN_SNPRINTF_CHK:
11571 case BUILT_IN_VSNPRINTF_CHK:
11572 len = CALL_EXPR_ARG (exp, 1);
11573 size = CALL_EXPR_ARG (exp, 3);
11574 break;
11575 default:
11576 gcc_unreachable ();
11577 }
11578
11579 if (!len || !size)
11580 return;
11581
11582 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11583 return;
11584
11585 if (is_strlen)
11586 {
11587 len = c_strlen (len, 1);
11588 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11589 return;
11590 }
11591 else if (fcode == BUILT_IN_STRNCAT_CHK)
11592 {
11593 tree src = CALL_EXPR_ARG (exp, 1);
11594 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11595 return;
11596 src = c_strlen (src, 1);
11597 if (! src || ! tree_fits_uhwi_p (src))
11598 {
11599 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11600 exp, get_callee_fndecl (exp));
11601 return;
11602 }
11603 else if (tree_int_cst_lt (src, size))
11604 return;
11605 }
11606 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11607 return;
11608
11609 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11610 exp, get_callee_fndecl (exp));
11611 }
11612
11613 /* Emit warning if a buffer overflow is detected at compile time
11614 in __sprintf_chk/__vsprintf_chk calls. */
11615
11616 static void
11617 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11618 {
11619 tree size, len, fmt;
11620 const char *fmt_str;
11621 int nargs = call_expr_nargs (exp);
11622
11623 /* Verify the required arguments in the original call. */
11624
11625 if (nargs < 4)
11626 return;
11627 size = CALL_EXPR_ARG (exp, 2);
11628 fmt = CALL_EXPR_ARG (exp, 3);
11629
11630 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11631 return;
11632
11633 /* Check whether the format is a literal string constant. */
11634 fmt_str = c_getstr (fmt);
11635 if (fmt_str == NULL)
11636 return;
11637
11638 if (!init_target_chars ())
11639 return;
11640
11641 /* If the format doesn't contain % args or %%, we know its size. */
11642 if (strchr (fmt_str, target_percent) == 0)
11643 len = build_int_cstu (size_type_node, strlen (fmt_str));
11644 /* If the format is "%s" and the first variadic argument is a string
11645 literal, we know the output size too. */
11646 else if (fcode == BUILT_IN_SPRINTF_CHK
11647 && strcmp (fmt_str, target_percent_s) == 0)
11648 {
11649 tree arg;
11650
11651 if (nargs < 5)
11652 return;
11653 arg = CALL_EXPR_ARG (exp, 4);
11654 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11655 return;
11656
11657 len = c_strlen (arg, 1);
11658 if (!len || ! tree_fits_uhwi_p (len))
11659 return;
11660 }
11661 else
11662 return;
11663
11664 if (! tree_int_cst_lt (len, size))
11665 warning_at (tree_nonartificial_location (exp),
11666 0, "%Kcall to %D will always overflow destination buffer",
11667 exp, get_callee_fndecl (exp));
11668 }
11669
11670 /* Emit warning if a free is called with address of a variable. */
11671
11672 static void
11673 maybe_emit_free_warning (tree exp)
11674 {
11675 tree arg = CALL_EXPR_ARG (exp, 0);
11676
11677 STRIP_NOPS (arg);
11678 if (TREE_CODE (arg) != ADDR_EXPR)
11679 return;
11680
11681 arg = get_base_address (TREE_OPERAND (arg, 0));
11682 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11683 return;
11684
11685 if (SSA_VAR_P (arg))
11686 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11687 "%Kattempt to free a non-heap object %qD", exp, arg);
11688 else
11689 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11690 "%Kattempt to free a non-heap object", exp);
11691 }
11692
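/* Illustrative trigger (editorial example):

     int x;
     free (&x);

   warns with -Wfree-nonheap-object, since the address of an automatic
   or static variable cannot have come from an allocator.  */
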
11693 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11694 if possible. */
11695
11696 static tree
11697 fold_builtin_object_size (tree ptr, tree ost)
11698 {
11699 unsigned HOST_WIDE_INT bytes;
11700 int object_size_type;
11701
11702 if (!validate_arg (ptr, POINTER_TYPE)
11703 || !validate_arg (ost, INTEGER_TYPE))
11704 return NULL_TREE;
11705
11706 STRIP_NOPS (ost);
11707
11708 if (TREE_CODE (ost) != INTEGER_CST
11709 || tree_int_cst_sgn (ost) < 0
11710 || compare_tree_int (ost, 3) > 0)
11711 return NULL_TREE;
11712
11713 object_size_type = tree_to_shwi (ost);
11714
11715 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11716 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11717 and (size_t) 0 for types 2 and 3. */
11718 if (TREE_SIDE_EFFECTS (ptr))
11719 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11720
11721 if (TREE_CODE (ptr) == ADDR_EXPR)
11722 {
11723 bytes = compute_builtin_object_size (ptr, object_size_type);
11724 if (wi::fits_to_tree_p (bytes, size_type_node))
11725 return build_int_cstu (size_type_node, bytes);
11726 }
11727 else if (TREE_CODE (ptr) == SSA_NAME)
11728 {
11729 /* If object size is not known yet, delay folding until
11730 later. Maybe subsequent passes will help determining
11731 it. */
11732 bytes = compute_builtin_object_size (ptr, object_size_type);
11733 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11734 && wi::fits_to_tree_p (bytes, size_type_node))
11735 return build_int_cstu (size_type_node, bytes);
11736 }
11737
11738 return NULL_TREE;
11739 }
11740
11741 /* Builtins with folding operations that operate on "..." arguments
11742 need special handling; we need to store the arguments in a convenient
11743 data structure before attempting any folding. Fortunately there are
11744 only a few builtins that fall into this category. FNDECL is the
11745 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11746 result of the function call is ignored. */
11747
11748 static tree
11749 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs,
11750 bool ignore ATTRIBUTE_UNUSED)
11751 {
11752 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11753 tree ret = NULL_TREE;
11754
11755 switch (fcode)
11756 {
11757 case BUILT_IN_FPCLASSIFY:
11758 ret = fold_builtin_fpclassify (loc, args, nargs);
11759 break;
11760
11761 default:
11762 break;
11763 }
11764 if (ret)
11765 {
11766 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11767 SET_EXPR_LOCATION (ret, loc);
11768 TREE_NO_WARNING (ret) = 1;
11769 return ret;
11770 }
11771 return NULL_TREE;
11772 }
11773
11774 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11775 FMT and ARG are the arguments to the call; we don't fold cases with
11776 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11777
11778 Return NULL_TREE if no simplification was possible, otherwise return the
11779 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11780 code of the function to be simplified. */
11781
11782 static tree
11783 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11784 tree arg, bool ignore,
11785 enum built_in_function fcode)
11786 {
11787 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11788 const char *fmt_str = NULL;
11789
11790 /* If the return value is used, don't do the transformation. */
11791 if (! ignore)
11792 return NULL_TREE;
11793
11794 /* Verify the required arguments in the original call. */
11795 if (!validate_arg (fmt, POINTER_TYPE))
11796 return NULL_TREE;
11797
11798 /* Check whether the format is a literal string constant. */
11799 fmt_str = c_getstr (fmt);
11800 if (fmt_str == NULL)
11801 return NULL_TREE;
11802
11803 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11804 {
11805 /* If we're using an unlocked function, assume the other
11806 unlocked functions exist explicitly. */
11807 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11808 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11809 }
11810 else
11811 {
11812 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11813 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11814 }
11815
11816 if (!init_target_chars ())
11817 return NULL_TREE;
11818
11819 if (strcmp (fmt_str, target_percent_s) == 0
11820 || strchr (fmt_str, target_percent) == NULL)
11821 {
11822 const char *str;
11823
11824 if (strcmp (fmt_str, target_percent_s) == 0)
11825 {
11826 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11827 return NULL_TREE;
11828
11829 if (!arg || !validate_arg (arg, POINTER_TYPE))
11830 return NULL_TREE;
11831
11832 str = c_getstr (arg);
11833 if (str == NULL)
11834 return NULL_TREE;
11835 }
11836 else
11837 {
11838 /* The format specifier doesn't contain any '%' characters. */
11839 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11840 && arg)
11841 return NULL_TREE;
11842 str = fmt_str;
11843 }
11844
11845 /* If the string was "", printf does nothing. */
11846 if (str[0] == '\0')
11847 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11848
11849 /* If the string is exactly one character long, call putchar. */
11850 if (str[1] == '\0')
11851 {
11852 /* Given printf ("c"), where c is any single character,
11853 convert "c"[0] to an int and pass that to the replacement
11854 function. */
11855 newarg = build_int_cst (integer_type_node, str[0]);
11856 if (fn_putchar)
11857 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11858 }
11859 else
11860 {
11861 /* If the string was "string\n", call puts("string"). */
11862 size_t len = strlen (str);
11863 if ((unsigned char)str[len - 1] == target_newline
11864 && (size_t) (int) len == len
11865 && (int) len > 0)
11866 {
11867 char *newstr;
11868 tree offset_node, string_cst;
11869
11870 /* Create a NUL-terminated string that's one char shorter
11871 than the original, stripping off the trailing '\n'. */
11872 newarg = build_string_literal (len, str);
11873 string_cst = string_constant (newarg, &offset_node);
11874 gcc_checking_assert (string_cst
11875 && (TREE_STRING_LENGTH (string_cst)
11876 == (int) len)
11877 && integer_zerop (offset_node)
11878 && (unsigned char)
11879 TREE_STRING_POINTER (string_cst)[len - 1]
11880 == target_newline);
11881 /* build_string_literal creates a new STRING_CST,
11882 modify it in place to avoid double copying. */
11883 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11884 newstr[len - 1] = '\0';
11885 if (fn_puts)
11886 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11887 }
11888 else
11889 /* We'd like to arrange to call fputs(string,stdout) here,
11890 but we need stdout and don't have a way to get it yet. */
11891 return NULL_TREE;
11892 }
11893 }
11894
11895 /* The other optimizations can be done only on the non-va_list variants. */
11896 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11897 return NULL_TREE;
11898
11899 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11900 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11901 {
11902 if (!arg || !validate_arg (arg, POINTER_TYPE))
11903 return NULL_TREE;
11904 if (fn_puts)
11905 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11906 }
11907
11908 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11909 else if (strcmp (fmt_str, target_percent_c) == 0)
11910 {
11911 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11912 return NULL_TREE;
11913 if (fn_putchar)
11914 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11915 }
11916
11917 if (!call)
11918 return NULL_TREE;
11919
11920 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11921 }
11922
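/* Summary of the transformations above (editorial sketch), all applied
   only when the return value is ignored:

     printf ("")          -->  0 (no output)
     printf ("x")         -->  putchar ('x')
     printf ("str\n")     -->  puts ("str")
     printf ("%s\n", s)   -->  puts (s)
     printf ("%c", c)     -->  putchar (c)  */
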
11923 /* Initialize format string characters in the target charset. */
11924
11925 bool
11926 init_target_chars (void)
11927 {
11928 static bool init;
11929 if (!init)
11930 {
11931 target_newline = lang_hooks.to_target_charset ('\n');
11932 target_percent = lang_hooks.to_target_charset ('%');
11933 target_c = lang_hooks.to_target_charset ('c');
11934 target_s = lang_hooks.to_target_charset ('s');
11935 if (target_newline == 0 || target_percent == 0 || target_c == 0
11936 || target_s == 0)
11937 return false;
11938
11939 target_percent_c[0] = target_percent;
11940 target_percent_c[1] = target_c;
11941 target_percent_c[2] = '\0';
11942
11943 target_percent_s[0] = target_percent;
11944 target_percent_s[1] = target_s;
11945 target_percent_s[2] = '\0';
11946
11947 target_percent_s_newline[0] = target_percent;
11948 target_percent_s_newline[1] = target_s;
11949 target_percent_s_newline[2] = target_newline;
11950 target_percent_s_newline[3] = '\0';
11951
11952 init = true;
11953 }
11954 return true;
11955 }
11956
11957 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11958 and no overflow/underflow occurred. INEXACT is true if M was not
11959 exactly calculated. TYPE is the tree type for the result. This
11960 function assumes that you cleared the MPFR flags before
11961 calculating M, so that any flag set on entry was raised by that
11962 calculation. Return NULL_TREE if any checks fail. */
11963
11964 static tree
11965 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11966 {
11967 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11968 overflow/underflow occurred. If -frounding-math, proceed iff the
11969 result of calling FUNC was exact. */
11970 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11971 && (!flag_rounding_math || !inexact))
11972 {
11973 REAL_VALUE_TYPE rr;
11974
11975 real_from_mpfr (&rr, m, type, GMP_RNDN);
11976 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
11977 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11978 but the mpfr_t is not, then we underflowed in the
11979 conversion. */
11980 if (real_isfinite (&rr)
11981 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11982 {
11983 REAL_VALUE_TYPE rmode;
11984
11985 real_convert (&rmode, TYPE_MODE (type), &rr);
11986 /* Proceed iff the specified mode can hold the value. */
11987 if (real_identical (&rmode, &rr))
11988 return build_real (type, rmode);
11989 }
11990 }
11991 return NULL_TREE;
11992 }
11993
11994 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11995 number and no overflow/underflow occurred. INEXACT is true if M
11996 was not exactly calculated. TYPE is the tree type for the result.
11997 This function assumes that you cleared the MPFR flags before
11998 calculating M, so that any flag set on entry was raised by that
11999 calculation. Return NULL_TREE if any checks fail; if
12000 FORCE_CONVERT is true, bypass the checks. */
12001
12002 static tree
12003 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12004 {
12005 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12006 overflow/underflow occurred. If -frounding-math, proceed iff the
12007 result of calling FUNC was exact. */
12008 if (force_convert
12009 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12010 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12011 && (!flag_rounding_math || !inexact)))
12012 {
12013 REAL_VALUE_TYPE re, im;
12014
12015 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12016 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12017 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
12018 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12019 but the mpfr_t is not, then we underflowed in the
12020 conversion. */
12021 if (force_convert
12022 || (real_isfinite (&re) && real_isfinite (&im)
12023 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12024 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12025 {
12026 REAL_VALUE_TYPE re_mode, im_mode;
12027
12028 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12029 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12030 /* Proceed iff the specified mode can hold the value. */
12031 if (force_convert
12032 || (real_identical (&re_mode, &re)
12033 && real_identical (&im_mode, &im)))
12034 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12035 build_real (TREE_TYPE (type), im_mode));
12036 }
12037 }
12038 return NULL_TREE;
12039 }
12040
12041 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12042 FUNC on it and return the resulting value as a tree with type TYPE.
12043 If MIN and/or MAX are not NULL, then the supplied ARG must be
12044 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12045 acceptable values, otherwise they are not. The mpfr precision is
12046 set to the precision of TYPE. We assume that function FUNC returns
12047 zero if the result could be calculated exactly within the requested
12048 precision. */
12049
12050 static tree
12051 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12052 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12053 bool inclusive)
12054 {
12055 tree result = NULL_TREE;
12056
12057 STRIP_NOPS (arg);
12058
12059 /* To proceed, MPFR must exactly represent the target floating point
12060 format, which only happens when the target base equals two. */
12061 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12062 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12063 {
12064 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12065
12066 if (real_isfinite (ra)
12067 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12068 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12069 {
12070 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12071 const int prec = fmt->p;
12072 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12073 int inexact;
12074 mpfr_t m;
12075
12076 mpfr_init2 (m, prec);
12077 mpfr_from_real (m, ra, GMP_RNDN);
12078 mpfr_clear_flags ();
12079 inexact = func (m, m, rnd);
12080 result = do_mpfr_ckconv (m, type, inexact);
12081 mpfr_clear (m);
12082 }
12083 }
12084
12085 return result;
12086 }
12087
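/* Usage sketch (editorial example): folding sin of a constant goes
   through

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   while domain-restricted functions pass bounds, e.g. acos uses
   MIN = -1, MAX = 1 with INCLUSIVE true.  */
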
12088 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12089 FUNC on it and return the resulting value as a tree with type TYPE.
12090 The mpfr precision is set to the precision of TYPE. We assume that
12091 function FUNC returns zero if the result could be calculated
12092 exactly within the requested precision. */
12093
12094 static tree
12095 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12096 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12097 {
12098 tree result = NULL_TREE;
12099
12100 STRIP_NOPS (arg1);
12101 STRIP_NOPS (arg2);
12102
12103 /* To proceed, MPFR must exactly represent the target floating point
12104 format, which only happens when the target base equals two. */
12105 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12106 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12107 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12108 {
12109 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12110 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12111
12112 if (real_isfinite (ra1) && real_isfinite (ra2))
12113 {
12114 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12115 const int prec = fmt->p;
12116 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12117 int inexact;
12118 mpfr_t m1, m2;
12119
12120 mpfr_inits2 (prec, m1, m2, NULL);
12121 mpfr_from_real (m1, ra1, GMP_RNDN);
12122 mpfr_from_real (m2, ra2, GMP_RNDN);
12123 mpfr_clear_flags ();
12124 inexact = func (m1, m1, m2, rnd);
12125 result = do_mpfr_ckconv (m1, type, inexact);
12126 mpfr_clears (m1, m2, NULL);
12127 }
12128 }
12129
12130 return result;
12131 }
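
/* Sketch of a typical call: folding atan2 on two constants, where
   mpfr_atan2 matches the expected
   int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t) signature:

     tree folded = do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);  */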
12132
12133 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
12134 mpfr function FUNC on them and return the result as a tree with type TYPE.
12135 The mpfr precision is set to the precision of TYPE. We assume that
12136 function FUNC returns zero if the result could be calculated
12137 exactly within the requested precision. */
12138
12139 static tree
12140 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12141 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12142 {
12143 tree result = NULL_TREE;
12144
12145 STRIP_NOPS (arg1);
12146 STRIP_NOPS (arg2);
12147 STRIP_NOPS (arg3);
12148
12149 /* To proceed, MPFR must exactly represent the target floating point
12150 format, which only happens when the target base equals two. */
12151 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12152 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12153 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12154 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12155 {
12156 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12157 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12158 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12159
12160 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12161 {
12162 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12163 const int prec = fmt->p;
12164 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12165 int inexact;
12166 mpfr_t m1, m2, m3;
12167
12168 mpfr_inits2 (prec, m1, m2, m3, NULL);
12169 mpfr_from_real (m1, ra1, GMP_RNDN);
12170 mpfr_from_real (m2, ra2, GMP_RNDN);
12171 mpfr_from_real (m3, ra3, GMP_RNDN);
12172 mpfr_clear_flags ();
12173 inexact = func (m1, m1, m2, m3, rnd);
12174 result = do_mpfr_ckconv (m1, type, inexact);
12175 mpfr_clears (m1, m2, m3, NULL);
12176 }
12177 }
12178
12179 return result;
12180 }
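
/* Sketch of a typical call: a fused multiply-add on three constant
   operands, with mpfr_fma as FUNC:

     tree folded = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */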
12181
12182 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12183 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12184 If ARG_SINP and ARG_COSP are NULL then the result is returned
12185 as a complex value.
12186 The type is taken from the type of ARG and is used for setting the
12187 precision of the calculation and results. */
12188
12189 static tree
12190 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12191 {
12192 tree const type = TREE_TYPE (arg);
12193 tree result = NULL_TREE;
12194
12195 STRIP_NOPS (arg);
12196
12197 /* To proceed, MPFR must exactly represent the target floating point
12198 format, which only happens when the target base equals two. */
12199 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12200 && TREE_CODE (arg) == REAL_CST
12201 && !TREE_OVERFLOW (arg))
12202 {
12203 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12204
12205 if (real_isfinite (ra))
12206 {
12207 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12208 const int prec = fmt->p;
12209 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12210 tree result_s, result_c;
12211 int inexact;
12212 mpfr_t m, ms, mc;
12213
12214 mpfr_inits2 (prec, m, ms, mc, NULL);
12215 mpfr_from_real (m, ra, GMP_RNDN);
12216 mpfr_clear_flags ();
12217 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12218 result_s = do_mpfr_ckconv (ms, type, inexact);
12219 result_c = do_mpfr_ckconv (mc, type, inexact);
12220 mpfr_clears (m, ms, mc, NULL);
12221 if (result_s && result_c)
12222 {
12223 /* If we are to return the result as a complex value, do so. */
12224 if (!arg_sinp && !arg_cosp)
12225 return build_complex (build_complex_type (type),
12226 result_c, result_s);
12227
12228 /* Dereference the sin/cos pointer arguments. */
12229 arg_sinp = build_fold_indirect_ref (arg_sinp);
12230 arg_cosp = build_fold_indirect_ref (arg_cosp);
12231 /* Proceed iff valid pointer types were passed in. */
12232 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12233 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12234 {
12235 /* Set the values. */
12236 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12237 result_s);
12238 TREE_SIDE_EFFECTS (result_s) = 1;
12239 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12240 result_c);
12241 TREE_SIDE_EFFECTS (result_c) = 1;
12242 /* Combine the assignments into a compound expr. */
12243 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12244 result_s, result_c));
12245 }
12246 }
12247 }
12248 }
12249 return result;
12250 }
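
/* Two usage sketches: with pointer arguments the result is the
   compound assignment used for sincos-style builtins; with NULL
   pointers both values are packaged as a single complex constant
   (cexpi-style):

     do_mpfr_sincos (arg, arg_sinp, arg_cosp);
     do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);  */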
12251
12252 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12253 two-argument mpfr order N Bessel function FUNC on them and return
12254 the resulting value as a tree with type TYPE. The mpfr precision
12255 is set to the precision of TYPE. We assume that function FUNC
12256 returns zero if the result could be calculated exactly within the
12257 requested precision. */
12258 static tree
12259 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12260 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12261 const REAL_VALUE_TYPE *min, bool inclusive)
12262 {
12263 tree result = NULL_TREE;
12264
12265 STRIP_NOPS (arg1);
12266 STRIP_NOPS (arg2);
12267
12268 /* To proceed, MPFR must exactly represent the target floating point
12269 format, which only happens when the target base equals two. */
12270 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12271 && tree_fits_shwi_p (arg1)
12272 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12273 {
12274 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12275 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12276
12277 if (n == (long) n
12278 && real_isfinite (ra)
12279 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12280 {
12281 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12282 const int prec = fmt->p;
12283 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12284 int inexact;
12285 mpfr_t m;
12286
12287 mpfr_init2 (m, prec);
12288 mpfr_from_real (m, ra, GMP_RNDN);
12289 mpfr_clear_flags ();
12290 inexact = func (m, n, m, rnd);
12291 result = do_mpfr_ckconv (m, type, inexact);
12292 mpfr_clear (m);
12293 }
12294 }
12295
12296 return result;
12297 }
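
/* Sketch of a typical call: folding jn (n, x) on constants with
   mpfr_jn, which matches the expected
   int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t) signature; a
   yn-style fold would additionally pass a lower bound such as
   &dconst0:

     tree folded = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn,
                                     NULL, false);  */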
12298
12299 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12300 the integer pointed to by ARG_QUO and return the result. The type is taken
12301 from the type of ARG0 and is used for setting the precision of the
12302 calculation and results. */
12303
12304 static tree
12305 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12306 {
12307 tree const type = TREE_TYPE (arg0);
12308 tree result = NULL_TREE;
12309
12310 STRIP_NOPS (arg0);
12311 STRIP_NOPS (arg1);
12312
12313 /* To proceed, MPFR must exactly represent the target floating point
12314 format, which only happens when the target base equals two. */
12315 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12316 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12317 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12318 {
12319 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12320 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12321
12322 if (real_isfinite (ra0) && real_isfinite (ra1))
12323 {
12324 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12325 const int prec = fmt->p;
12326 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12327 tree result_rem;
12328 long integer_quo;
12329 mpfr_t m0, m1;
12330
12331 mpfr_inits2 (prec, m0, m1, NULL);
12332 mpfr_from_real (m0, ra0, GMP_RNDN);
12333 mpfr_from_real (m1, ra1, GMP_RNDN);
12334 mpfr_clear_flags ();
12335 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12336 /* Remquo is independent of the rounding mode, so pass
12337 inexact=0 to do_mpfr_ckconv(). */
12338 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12339 mpfr_clears (m0, m1, NULL);
12340 if (result_rem)
12341 {
12342 /* MPFR calculates quo in the host's long so it may
12343 return more bits in quo than the target int can hold
12344 if sizeof(host long) > sizeof(target int). This can
12345 happen even for native compilers in LP64 mode. In
12346 these cases, reduce the quo value modulo the largest
12347 number that the target int can hold while leaving one
12348 bit for the sign. */
12349 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12350 integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
12351
12352 /* Dereference the quo pointer argument. */
12353 arg_quo = build_fold_indirect_ref (arg_quo);
12354 /* Proceed iff a valid pointer type was passed in. */
12355 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12356 {
12357 /* Set the value. */
12358 tree result_quo
12359 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12360 build_int_cst (TREE_TYPE (arg_quo),
12361 integer_quo));
12362 TREE_SIDE_EFFECTS (result_quo) = 1;
12363 /* Combine the quo assignment with the rem. */
12364 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12365 result_quo, result_rem));
12366 }
12367 }
12368 }
12369 }
12370 return result;
12371 }
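
/* On success the returned tree has the shape of a C comma
   expression, e.g. for remquo (x, y, &q) on constant X and Y:

     (q = QUO_CONST, REM_CONST)

   so gimplification both stores the quotient bits and yields the
   remainder as the call's value.  QUO_CONST and REM_CONST stand for
   the folded constants; they are not identifiers in this file.  */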
12372
12373 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12374 resulting value as a tree with type TYPE. The mpfr precision is
12375 set to the precision of TYPE. We assume that this mpfr function
12376 returns zero if the result could be calculated exactly within the
12377 requested precision. In addition, the integer pointer represented
12378 by ARG_SG will be dereferenced and set to the appropriate signgam
12379 (-1,1) value. */
12380
12381 static tree
12382 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12383 {
12384 tree result = NULL_TREE;
12385
12386 STRIP_NOPS (arg);
12387
12388 /* To proceed, MPFR must exactly represent the target floating point
12389 format, which only happens when the target base equals two. Also
12390 verify ARG is a constant and that ARG_SG is an int pointer. */
12391 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12392 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12393 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12394 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12395 {
12396 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12397
12398 /* In addition to NaN and Inf, the argument cannot be zero or a
12399 negative integer. */
12400 if (real_isfinite (ra)
12401 && ra->cl != rvc_zero
12402 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12403 {
12404 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12405 const int prec = fmt->p;
12406 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12407 int inexact, sg;
12408 mpfr_t m;
12409 tree result_lg;
12410
12411 mpfr_init2 (m, prec);
12412 mpfr_from_real (m, ra, GMP_RNDN);
12413 mpfr_clear_flags ();
12414 inexact = mpfr_lgamma (m, &sg, m, rnd);
12415 result_lg = do_mpfr_ckconv (m, type, inexact);
12416 mpfr_clear (m);
12417 if (result_lg)
12418 {
12419 tree result_sg;
12420
12421 /* Dereference the arg_sg pointer argument. */
12422 arg_sg = build_fold_indirect_ref (arg_sg);
12423 /* Assign the signgam value into *arg_sg. */
12424 result_sg = fold_build2 (MODIFY_EXPR,
12425 TREE_TYPE (arg_sg), arg_sg,
12426 build_int_cst (TREE_TYPE (arg_sg), sg));
12427 TREE_SIDE_EFFECTS (result_sg) = 1;
12428 /* Combine the signgam assignment with the lgamma result. */
12429 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12430 result_sg, result_lg));
12431 }
12432 }
12433 }
12434
12435 return result;
12436 }
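
/* The successful result mirrors do_mpfr_remquo: a COMPOUND_EXPR of
   the shape

     (*arg_sg = SG_CONST, LGAMMA_CONST)

   where SG_CONST is the integer constant -1 or 1 and LGAMMA_CONST
   the folded real value; both names are illustrative only.  */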
12437
12438 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12439 function FUNC on it and return the resulting value as a tree with
12440 type TYPE. The mpfr precision is set to the precision of TYPE. We
12441 assume that function FUNC returns zero if the result could be
12442 calculated exactly within the requested precision. */
12443
12444 static tree
12445 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12446 {
12447 tree result = NULL_TREE;
12448
12449 STRIP_NOPS (arg);
12450
12451 /* To proceed, MPFR must exactly represent the target floating point
12452 format, which only happens when the target base equals two. */
12453 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12454 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12455 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12456 {
12457 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12458 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12459
12460 if (real_isfinite (re) && real_isfinite (im))
12461 {
12462 const struct real_format *const fmt =
12463 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12464 const int prec = fmt->p;
12465 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12466 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12467 int inexact;
12468 mpc_t m;
12469
12470 mpc_init2 (m, prec);
12471 mpfr_from_real (mpc_realref (m), re, rnd);
12472 mpfr_from_real (mpc_imagref (m), im, rnd);
12473 mpfr_clear_flags ();
12474 inexact = func (m, m, crnd);
12475 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12476 mpc_clear (m);
12477 }
12478 }
12479
12480 return result;
12481 }
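
/* Sketch of a typical call: folding a complex cosine on a constant,
   with mpc_cos as FUNC, which matches the expected
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) signature:

     tree folded = do_mpc_arg1 (arg, type, mpc_cos);  */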
12482
12483 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12484 mpc function FUNC on them and return the resulting value as a tree
12485 with type TYPE. The mpfr precision is set to the precision of
12486 TYPE. We assume that function FUNC returns zero if the result
12487 could be calculated exactly within the requested precision. If
12488 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12489 in the arguments and/or results. */
12490
12491 tree
12492 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12493 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12494 {
12495 tree result = NULL_TREE;
12496
12497 STRIP_NOPS (arg0);
12498 STRIP_NOPS (arg1);
12499
12500 /* To proceed, MPFR must exactly represent the target floating point
12501 format, which only happens when the target base equals two. */
12502 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12503 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12504 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12505 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12506 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12507 {
12508 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12509 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12510 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12511 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12512
12513 if (do_nonfinite
12514 || (real_isfinite (re0) && real_isfinite (im0)
12515 && real_isfinite (re1) && real_isfinite (im1)))
12516 {
12517 const struct real_format *const fmt =
12518 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12519 const int prec = fmt->p;
12520 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12521 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12522 int inexact;
12523 mpc_t m0, m1;
12524
12525 mpc_init2 (m0, prec);
12526 mpc_init2 (m1, prec);
12527 mpfr_from_real (mpc_realref (m0), re0, rnd);
12528 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12529 mpfr_from_real (mpc_realref (m1), re1, rnd);
12530 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12531 mpfr_clear_flags ();
12532 inexact = func (m0, m0, m1, crnd);
12533 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12534 mpc_clear (m0);
12535 mpc_clear (m1);
12536 }
12537 }
12538
12539 return result;
12540 }
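
/* Sketch of a typical call: folding cpow on constant operands.  A
   nonzero DO_NONFINITE lets Inf/NaN operands fold as well, which is
   only appropriate under relaxed math semantics:

     tree folded = do_mpc_arg2 (arg0, arg1, type,
                                flag_unsafe_math_optimizations,
                                mpc_pow);  */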
12541
12542 /* A wrapper function for builtin folding that prevents warnings for
12543 "statement without effect" and the like, caused by removing the
12544 call node before the warning is generated. */
12545
12546 tree
12547 fold_call_stmt (gcall *stmt, bool ignore)
12548 {
12549 tree ret = NULL_TREE;
12550 tree fndecl = gimple_call_fndecl (stmt);
12551 location_t loc = gimple_location (stmt);
12552 if (fndecl
12553 && TREE_CODE (fndecl) == FUNCTION_DECL
12554 && DECL_BUILT_IN (fndecl)
12555 && !gimple_call_va_arg_pack_p (stmt))
12556 {
12557 int nargs = gimple_call_num_args (stmt);
12558 tree *args = (nargs > 0
12559 ? gimple_call_arg_ptr (stmt, 0)
12560 : &error_mark_node);
12561
12562 if (avoid_folding_inline_builtin (fndecl))
12563 return NULL_TREE;
12564 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12565 {
12566 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12567 }
12568 else
12569 {
12570 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12571 if (ret)
12572 {
12573 /* Propagate location information from original call to
12574 expansion of builtin. Otherwise things like
12575 maybe_emit_chk_warning, that operate on the expansion
12576 of a builtin, will use the wrong location information. */
12577 if (gimple_has_location (stmt))
12578 {
12579 tree realret = ret;
12580 if (TREE_CODE (ret) == NOP_EXPR)
12581 realret = TREE_OPERAND (ret, 0);
12582 if (CAN_HAVE_LOCATION_P (realret)
12583 && !EXPR_HAS_LOCATION (realret))
12584 SET_EXPR_LOCATION (realret, loc);
12585 return realret;
12586 }
12587 return ret;
12588 }
12589 }
12590 }
12591 return NULL_TREE;
12592 }
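
/* For example, a GIMPLE statement such as

     x = __builtin_sqrt (4.0);

   folds here to the constant 2.0, and the location of the original
   call is copied onto the replacement expression so that later
   warnings about the expansion point at the right source line.  */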
12593
12594 /* Look up the declaration that builtin_decl_explicit yields for DECL
12595 and set ASMSPEC as its user assembler name. DECL must be a
12596 function decl that declares a builtin. */
12597
12598 void
12599 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12600 {
12601 tree builtin;
12602 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12603 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12604 && asmspec != 0);
12605
12606 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12607 set_user_assembler_name (builtin, asmspec);
12608 switch (DECL_FUNCTION_CODE (decl))
12609 {
12610 case BUILT_IN_MEMCPY:
12611 init_block_move_fn (asmspec);
12612 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12613 break;
12614 case BUILT_IN_MEMSET:
12615 init_block_clear_fn (asmspec);
12616 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12617 break;
12618 case BUILT_IN_MEMMOVE:
12619 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12620 break;
12621 case BUILT_IN_MEMCMP:
12622 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12623 break;
12624 case BUILT_IN_ABORT:
12625 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12626 break;
12627 case BUILT_IN_FFS:
12628 if (INT_TYPE_SIZE < BITS_PER_WORD)
12629 {
12630 set_user_assembler_libfunc ("ffs", asmspec);
12631 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12632 MODE_INT, 0), "ffs");
12633 }
12634 break;
12635 default:
12636 break;
12637 }
12638 }
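
/* For example, a source-level redirection such as

     void *memcpy (void *, const void *, size_t)
       __asm__ ("__my_memcpy");

   routes both explicit memcpy calls and compiler-emitted block moves
   to __my_memcpy; the __my_memcpy name is illustrative only.  */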
12639
12640 /* Return true if DECL is a builtin that expands to a constant or similarly
12641 simple code. */
12642 bool
12643 is_simple_builtin (tree decl)
12644 {
12645 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12646 switch (DECL_FUNCTION_CODE (decl))
12647 {
12648 /* Builtins that expand to constants. */
12649 case BUILT_IN_CONSTANT_P:
12650 case BUILT_IN_EXPECT:
12651 case BUILT_IN_OBJECT_SIZE:
12652 case BUILT_IN_UNREACHABLE:
12653 /* Simple register moves or loads from stack. */
12654 case BUILT_IN_ASSUME_ALIGNED:
12655 case BUILT_IN_RETURN_ADDRESS:
12656 case BUILT_IN_EXTRACT_RETURN_ADDR:
12657 case BUILT_IN_FROB_RETURN_ADDR:
12658 case BUILT_IN_RETURN:
12659 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12660 case BUILT_IN_FRAME_ADDRESS:
12661 case BUILT_IN_VA_END:
12662 case BUILT_IN_STACK_SAVE:
12663 case BUILT_IN_STACK_RESTORE:
12664 /* Exception state returns or moves registers around. */
12665 case BUILT_IN_EH_FILTER:
12666 case BUILT_IN_EH_POINTER:
12667 case BUILT_IN_EH_COPY_VALUES:
12668 return true;
12669
12670 default:
12671 return false;
12672 }
12673
12674 return false;
12675 }
12676
12677 /* Return true if DECL is a builtin that is not expensive, i.e., it is
12678 most probably expanded inline into reasonably simple code. This is a
12679 superset of is_simple_builtin. */
12680 bool
12681 is_inexpensive_builtin (tree decl)
12682 {
12683 if (!decl)
12684 return false;
12685 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12686 return true;
12687 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12688 switch (DECL_FUNCTION_CODE (decl))
12689 {
12690 case BUILT_IN_ABS:
12691 case BUILT_IN_ALLOCA:
12692 case BUILT_IN_ALLOCA_WITH_ALIGN:
12693 case BUILT_IN_BSWAP16:
12694 case BUILT_IN_BSWAP32:
12695 case BUILT_IN_BSWAP64:
12696 case BUILT_IN_CLZ:
12697 case BUILT_IN_CLZIMAX:
12698 case BUILT_IN_CLZL:
12699 case BUILT_IN_CLZLL:
12700 case BUILT_IN_CTZ:
12701 case BUILT_IN_CTZIMAX:
12702 case BUILT_IN_CTZL:
12703 case BUILT_IN_CTZLL:
12704 case BUILT_IN_FFS:
12705 case BUILT_IN_FFSIMAX:
12706 case BUILT_IN_FFSL:
12707 case BUILT_IN_FFSLL:
12708 case BUILT_IN_IMAXABS:
12709 case BUILT_IN_FINITE:
12710 case BUILT_IN_FINITEF:
12711 case BUILT_IN_FINITEL:
12712 case BUILT_IN_FINITED32:
12713 case BUILT_IN_FINITED64:
12714 case BUILT_IN_FINITED128:
12715 case BUILT_IN_FPCLASSIFY:
12716 case BUILT_IN_ISFINITE:
12717 case BUILT_IN_ISINF_SIGN:
12718 case BUILT_IN_ISINF:
12719 case BUILT_IN_ISINFF:
12720 case BUILT_IN_ISINFL:
12721 case BUILT_IN_ISINFD32:
12722 case BUILT_IN_ISINFD64:
12723 case BUILT_IN_ISINFD128:
12724 case BUILT_IN_ISNAN:
12725 case BUILT_IN_ISNANF:
12726 case BUILT_IN_ISNANL:
12727 case BUILT_IN_ISNAND32:
12728 case BUILT_IN_ISNAND64:
12729 case BUILT_IN_ISNAND128:
12730 case BUILT_IN_ISNORMAL:
12731 case BUILT_IN_ISGREATER:
12732 case BUILT_IN_ISGREATEREQUAL:
12733 case BUILT_IN_ISLESS:
12734 case BUILT_IN_ISLESSEQUAL:
12735 case BUILT_IN_ISLESSGREATER:
12736 case BUILT_IN_ISUNORDERED:
12737 case BUILT_IN_VA_ARG_PACK:
12738 case BUILT_IN_VA_ARG_PACK_LEN:
12739 case BUILT_IN_VA_COPY:
12740 case BUILT_IN_TRAP:
12741 case BUILT_IN_SAVEREGS:
12742 case BUILT_IN_POPCOUNTL:
12743 case BUILT_IN_POPCOUNTLL:
12744 case BUILT_IN_POPCOUNTIMAX:
12745 case BUILT_IN_POPCOUNT:
12746 case BUILT_IN_PARITYL:
12747 case BUILT_IN_PARITYLL:
12748 case BUILT_IN_PARITYIMAX:
12749 case BUILT_IN_PARITY:
12750 case BUILT_IN_LABS:
12751 case BUILT_IN_LLABS:
12752 case BUILT_IN_PREFETCH:
12753 return true;
12754
12755 default:
12756 return is_simple_builtin (decl);
12757 }
12758
12759 return false;
12760 }
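
/* Both predicates feed size/cost heuristics such as inlining; a
   sketch of the intended use, with COST standing for whatever weight
   the caller computes:

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       cost = 1;  */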