/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);

static unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or (with -fcilkplus) names one of the Cilk runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
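
/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load" all satisfy the prefix tests above, while a plain
   "memcpy" does not.  */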


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
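
/* A worked instance of the BITPOS fold above: if get_object_alignment_1
   reports align == 128 bits with bitpos == 40 bits, the largest power of
   two known to divide the address is bitpos & -bitpos == 8 bits, so the
   object is only known to be byte aligned.  */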

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
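
/* For instance, for the constant "foo\0bar" with a known offset of 0 the
   scan above stops at the embedded NUL and yields 3, whereas a variable
   offset into the same string makes c_strlen return NULL_TREE because of
   that internal zero byte.  */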

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
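
/* Illustration, assuming 8-bit units and BYTES_BIG_ENDIAN equal to
   WORDS_BIG_ENDIAN: reading "abc" in SImode gives 0x00636261 on a
   little-endian target and 0x61626300 on a big-endian one; the zero
   byte appears because CH sticks at 0 once the NUL is reached.  */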

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
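
/* E.g. an SSA_NAME or a non-addressable PARM_DECL is returned unchanged,
   since nothing the expansion emits between the evaluations can modify
   it; anything else gets the full save_expr treatment.  */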

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
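
/* So __builtin_return_address (0) reads the return address relative to
   the current frame, while a COUNT of 1 first follows the dynamic chain
   once and then applies RETURN_ADDR_RTX or the word-past-the-frame
   fallback above.  */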

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
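
/* To summarize the layout built above: word 0 of the buffer holds the
   value of targetm.builtin_setjmp_frame_value, word 1 holds the address
   of RECEIVER_LABEL, and the words from offset 2 * GET_MODE_SIZE (Pmode)
   onward hold the machine-dependent SAVE_NONLOCAL stack save area.
   expand_builtin_longjmp below reads the same three slots back.  */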

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

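/* Return true if more arguments remain to be visited in ITER.  */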
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
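
/* For instance, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly two pointer arguments, while a trailing 0
   instead of VOID_TYPE additionally accepts any further arguments, as
   in the __builtin_prefetch check below.  */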

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
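
/* So for a call such as __builtin_prefetch (p, 0, 3), or the one- and
   two-argument forms that default to read access and maximal locality,
   the code above either emits the target's prefetch pattern or merely
   evaluates P for its side effects.  */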

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
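
/* A concrete instance of the rounding above: with SIZE == 12 and a
   register mode requiring 8-byte alignment, CEIL (12, 8) * 8 bumps
   SIZE to 16 before that mode's GET_MODE_SIZE bytes are appended.  */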

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

1681 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1682 need be done, and we don't want to load it into a register as an
1683 optimization, because prepare_call_address already did so if appropriate. */
1684 if (GET_CODE (function) != SYMBOL_REF)
1685 function = memory_address (FUNCTION_MODE, function);
1686
1687 /* Generate the actual call instruction and save the return value. */
1688 #ifdef HAVE_untyped_call
1689 if (HAVE_untyped_call)
1690 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1691 result, result_vector (1, result)));
1692 else
1693 #endif
1694 #ifdef HAVE_call_value
1695 if (HAVE_call_value)
1696 {
1697 rtx valreg = 0;
1698
1699 /* Locate the unique return register. It is not possible to
1700 express a call that sets more than one return register using
1701 call_value; use untyped_call for that. In fact, untyped_call
1702 only needs to save the return registers in the given block. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_result_mode[regno]) != VOIDmode)
1705 {
1706 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1707
1708 valreg = gen_rtx_REG (mode, regno);
1709 }
1710
1711 emit_call_insn (GEN_CALL_VALUE (valreg,
1712 gen_rtx_MEM (FUNCTION_MODE, function),
1713 const0_rtx, NULL_RTX, const0_rtx));
1714
1715 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1716 }
1717 else
1718 #endif
1719 gcc_unreachable ();
1720
1721 /* Find the CALL insn we just emitted, and attach the register usage
1722 information. */
1723 call_insn = last_call_insn ();
1724 add_function_usage_to (call_insn, call_fusage);
1725
1726 /* Restore the stack. */
1727 #ifdef HAVE_save_stack_nonlocal
1728 if (HAVE_save_stack_nonlocal)
1729 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1730 else
1731 #endif
1732 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1733 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1734
1735 OK_DEFER_POP;
1736
1737 /* Return the address of the result block. */
1738 result = copy_addr_to_reg (XEXP (result, 0));
1739 return convert_memory_address (ptr_mode, result);
1740 }
1741
1742 /* Perform an untyped return. */
1743
1744 static void
1745 expand_builtin_return (rtx result)
1746 {
1747 int size, align, regno;
1748 machine_mode mode;
1749 rtx reg;
1750 rtx_insn *call_fusage = 0;
1751
1752 result = convert_memory_address (Pmode, result);
1753
1754 apply_result_size ();
1755 result = gen_rtx_MEM (BLKmode, result);
1756
1757 #ifdef HAVE_untyped_return
1758 if (HAVE_untyped_return)
1759 {
1760 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1761 emit_barrier ();
1762 return;
1763 }
1764 #endif
1765
1766 /* Restore the return value and note that each value is used. */
1767 size = 0;
1768 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1769 if ((mode = apply_result_mode[regno]) != VOIDmode)
1770 {
1771 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1772 if (size % align != 0)
1773 size = CEIL (size, align) * align;
1774 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1775 emit_move_insn (reg, adjust_address (result, mode, size));
1776
1777 push_to_sequence (call_fusage);
1778 emit_use (reg);
1779 call_fusage = get_insns ();
1780 end_sequence ();
1781 size += GET_MODE_SIZE (mode);
1782 }
1783
1784 /* Put the USE insns before the return. */
1785 emit_insn (call_fusage);
1786
1787 /* Return whatever value was restored by jumping directly to the end
1788 of the function. */
1789 expand_naked_return ();
1790 }
1791
1792 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1793
1794 static enum type_class
1795 type_to_class (tree type)
1796 {
1797 switch (TREE_CODE (type))
1798 {
1799 case VOID_TYPE: return void_type_class;
1800 case INTEGER_TYPE: return integer_type_class;
1801 case ENUMERAL_TYPE: return enumeral_type_class;
1802 case BOOLEAN_TYPE: return boolean_type_class;
1803 case POINTER_TYPE: return pointer_type_class;
1804 case REFERENCE_TYPE: return reference_type_class;
1805 case OFFSET_TYPE: return offset_type_class;
1806 case REAL_TYPE: return real_type_class;
1807 case COMPLEX_TYPE: return complex_type_class;
1808 case FUNCTION_TYPE: return function_type_class;
1809 case METHOD_TYPE: return method_type_class;
1810 case RECORD_TYPE: return record_type_class;
1811 case UNION_TYPE:
1812 case QUAL_UNION_TYPE: return union_type_class;
1813 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1814 ? string_type_class : array_type_class);
1815 case LANG_TYPE: return lang_type_class;
1816 default: return no_type_class;
1817 }
1818 }
1819
1820 /* Expand a call EXP to __builtin_classify_type. */
1821
1822 static rtx
1823 expand_builtin_classify_type (tree exp)
1824 {
1825 if (call_expr_nargs (exp))
1826 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1827 return GEN_INT (no_type_class);
1828 }
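/* For example (per enum type_class in typeclass.h):
   __builtin_classify_type (1.0) evaluates to real_type_class, and
   __builtin_classify_type ((void *) 0) to pointer_type_class; a call
   with no arguments expands to no_type_class, as handled above.  */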
1829
1830 /* This helper macro, meant to be used in mathfn_built_in below,
1831 determines which among a set of three builtin math functions is
1832 appropriate for a given type mode. The `F' and `L' cases are
1833 automatically generated from the `double' case. */
1834 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1835 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1836 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1837 fcodel = BUILT_IN_MATHFN##L ; break;
1838 /* Similar to above, but appends _R after any F/L suffix. */
1839 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1840 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1841 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1842 fcodel = BUILT_IN_MATHFN##L_R ; break;
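/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands (by token pasting) to:

       case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
         fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
         fcodel = BUILT_IN_SINL; break;  */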
1843
1844 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1845 if available. If IMPLICIT is true use the implicit builtin declaration,
1846 otherwise use the explicit declaration. If we can't do the conversion,
1847 return zero. */
1848
1849 static tree
1850 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1851 {
1852 enum built_in_function fcode, fcodef, fcodel, fcode2;
1853
1854 switch (fn)
1855 {
1856 CASE_MATHFN (BUILT_IN_ACOS)
1857 CASE_MATHFN (BUILT_IN_ACOSH)
1858 CASE_MATHFN (BUILT_IN_ASIN)
1859 CASE_MATHFN (BUILT_IN_ASINH)
1860 CASE_MATHFN (BUILT_IN_ATAN)
1861 CASE_MATHFN (BUILT_IN_ATAN2)
1862 CASE_MATHFN (BUILT_IN_ATANH)
1863 CASE_MATHFN (BUILT_IN_CBRT)
1864 CASE_MATHFN (BUILT_IN_CEIL)
1865 CASE_MATHFN (BUILT_IN_CEXPI)
1866 CASE_MATHFN (BUILT_IN_COPYSIGN)
1867 CASE_MATHFN (BUILT_IN_COS)
1868 CASE_MATHFN (BUILT_IN_COSH)
1869 CASE_MATHFN (BUILT_IN_DREM)
1870 CASE_MATHFN (BUILT_IN_ERF)
1871 CASE_MATHFN (BUILT_IN_ERFC)
1872 CASE_MATHFN (BUILT_IN_EXP)
1873 CASE_MATHFN (BUILT_IN_EXP10)
1874 CASE_MATHFN (BUILT_IN_EXP2)
1875 CASE_MATHFN (BUILT_IN_EXPM1)
1876 CASE_MATHFN (BUILT_IN_FABS)
1877 CASE_MATHFN (BUILT_IN_FDIM)
1878 CASE_MATHFN (BUILT_IN_FLOOR)
1879 CASE_MATHFN (BUILT_IN_FMA)
1880 CASE_MATHFN (BUILT_IN_FMAX)
1881 CASE_MATHFN (BUILT_IN_FMIN)
1882 CASE_MATHFN (BUILT_IN_FMOD)
1883 CASE_MATHFN (BUILT_IN_FREXP)
1884 CASE_MATHFN (BUILT_IN_GAMMA)
1885 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1886 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1887 CASE_MATHFN (BUILT_IN_HYPOT)
1888 CASE_MATHFN (BUILT_IN_ILOGB)
1889 CASE_MATHFN (BUILT_IN_ICEIL)
1890 CASE_MATHFN (BUILT_IN_IFLOOR)
1891 CASE_MATHFN (BUILT_IN_INF)
1892 CASE_MATHFN (BUILT_IN_IRINT)
1893 CASE_MATHFN (BUILT_IN_IROUND)
1894 CASE_MATHFN (BUILT_IN_ISINF)
1895 CASE_MATHFN (BUILT_IN_J0)
1896 CASE_MATHFN (BUILT_IN_J1)
1897 CASE_MATHFN (BUILT_IN_JN)
1898 CASE_MATHFN (BUILT_IN_LCEIL)
1899 CASE_MATHFN (BUILT_IN_LDEXP)
1900 CASE_MATHFN (BUILT_IN_LFLOOR)
1901 CASE_MATHFN (BUILT_IN_LGAMMA)
1902 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1903 CASE_MATHFN (BUILT_IN_LLCEIL)
1904 CASE_MATHFN (BUILT_IN_LLFLOOR)
1905 CASE_MATHFN (BUILT_IN_LLRINT)
1906 CASE_MATHFN (BUILT_IN_LLROUND)
1907 CASE_MATHFN (BUILT_IN_LOG)
1908 CASE_MATHFN (BUILT_IN_LOG10)
1909 CASE_MATHFN (BUILT_IN_LOG1P)
1910 CASE_MATHFN (BUILT_IN_LOG2)
1911 CASE_MATHFN (BUILT_IN_LOGB)
1912 CASE_MATHFN (BUILT_IN_LRINT)
1913 CASE_MATHFN (BUILT_IN_LROUND)
1914 CASE_MATHFN (BUILT_IN_MODF)
1915 CASE_MATHFN (BUILT_IN_NAN)
1916 CASE_MATHFN (BUILT_IN_NANS)
1917 CASE_MATHFN (BUILT_IN_NEARBYINT)
1918 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1919 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1920 CASE_MATHFN (BUILT_IN_POW)
1921 CASE_MATHFN (BUILT_IN_POWI)
1922 CASE_MATHFN (BUILT_IN_POW10)
1923 CASE_MATHFN (BUILT_IN_REMAINDER)
1924 CASE_MATHFN (BUILT_IN_REMQUO)
1925 CASE_MATHFN (BUILT_IN_RINT)
1926 CASE_MATHFN (BUILT_IN_ROUND)
1927 CASE_MATHFN (BUILT_IN_SCALB)
1928 CASE_MATHFN (BUILT_IN_SCALBLN)
1929 CASE_MATHFN (BUILT_IN_SCALBN)
1930 CASE_MATHFN (BUILT_IN_SIGNBIT)
1931 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1932 CASE_MATHFN (BUILT_IN_SIN)
1933 CASE_MATHFN (BUILT_IN_SINCOS)
1934 CASE_MATHFN (BUILT_IN_SINH)
1935 CASE_MATHFN (BUILT_IN_SQRT)
1936 CASE_MATHFN (BUILT_IN_TAN)
1937 CASE_MATHFN (BUILT_IN_TANH)
1938 CASE_MATHFN (BUILT_IN_TGAMMA)
1939 CASE_MATHFN (BUILT_IN_TRUNC)
1940 CASE_MATHFN (BUILT_IN_Y0)
1941 CASE_MATHFN (BUILT_IN_Y1)
1942 CASE_MATHFN (BUILT_IN_YN)
1943
1944 default:
1945 return NULL_TREE;
1946 }
1947
1948 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1949 fcode2 = fcode;
1950 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1951 fcode2 = fcodef;
1952 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1953 fcode2 = fcodel;
1954 else
1955 return NULL_TREE;
1956
1957 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1958 return NULL_TREE;
1959
1960 return builtin_decl_explicit (fcode2);
1961 }
1962
1963 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1964
1965 tree
1966 mathfn_built_in (tree type, enum built_in_function fn)
1967 {
1968 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1969 }
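/* For example (assuming the implicit declaration of sinf is available),
   mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the decl for
   sinf, long_double_type_node yields sinl, and an unsupported type
   yields NULL_TREE.  */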
1970
1971 /* If errno must be maintained, expand the RTL to check if the result,
1972 TARGET, of a built-in function call, EXP, is NaN, and if so set
1973 errno to EDOM. */
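/* In C terms the emitted code amounts to (a sketch, not the actual RTL):

       if (res == res)          // false only when RES is NaN
         goto done;
       errno = EDOM;            // or re-issue the library call
     done:;                                                          */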
1974
1975 static void
1976 expand_errno_check (tree exp, rtx target)
1977 {
1978 rtx_code_label *lab = gen_label_rtx ();
1979
1980 /* Test the result; if it is NaN, set errno=EDOM because
1981 the argument was not in the domain. */
1982 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1983 NULL_RTX, NULL_RTX, lab,
1984 /* The jump is very likely. */
1985 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1986
1987 #ifdef TARGET_EDOM
1988 /* If this built-in doesn't throw an exception, set errno directly. */
1989 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1990 {
1991 #ifdef GEN_ERRNO_RTX
1992 rtx errno_rtx = GEN_ERRNO_RTX;
1993 #else
1994 rtx errno_rtx
1995 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1996 #endif
1997 emit_move_insn (errno_rtx,
1998 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1999 emit_label (lab);
2000 return;
2001 }
2002 #endif
2003
2004 /* Make sure the library call isn't expanded as a tail call. */
2005 CALL_EXPR_TAILCALL (exp) = 0;
2006
2007 /* We can't set errno=EDOM directly; let the library call do it.
2008 Pop the arguments right away in case the call gets deleted. */
2009 NO_DEFER_POP;
2010 expand_call (exp, target, 0);
2011 OK_DEFER_POP;
2012 emit_label (lab);
2013 }
2014
2015 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2016 Return NULL_RTX if a normal call should be emitted rather than expanding
2017 the function in-line. EXP is the expression that is a call to the builtin
2018 function; if convenient, the result should be placed in TARGET.
2019 SUBTARGET may be used as the target for computing one of EXP's operands. */
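/* For instance (illustrative): with -fno-math-errno on a target whose
   sqrt_optab maps to a hardware square-root instruction,

       y = __builtin_sqrt (x);

   becomes that single instruction; when errno must be maintained, the
   inline expansion is followed by the NaN check from expand_errno_check
   above, and when no optab handler exists a normal call to sqrt is
   emitted instead.  */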
2020
2021 static rtx
2022 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2023 {
2024 optab builtin_optab;
2025 rtx op0;
2026 rtx_insn *insns;
2027 tree fndecl = get_callee_fndecl (exp);
2028 machine_mode mode;
2029 bool errno_set = false;
2030 bool try_widening = false;
2031 tree arg;
2032
2033 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2034 return NULL_RTX;
2035
2036 arg = CALL_EXPR_ARG (exp, 0);
2037
2038 switch (DECL_FUNCTION_CODE (fndecl))
2039 {
2040 CASE_FLT_FN (BUILT_IN_SQRT):
2041 errno_set = ! tree_expr_nonnegative_p (arg);
2042 try_widening = true;
2043 builtin_optab = sqrt_optab;
2044 break;
2045 CASE_FLT_FN (BUILT_IN_EXP):
2046 errno_set = true; builtin_optab = exp_optab; break;
2047 CASE_FLT_FN (BUILT_IN_EXP10):
2048 CASE_FLT_FN (BUILT_IN_POW10):
2049 errno_set = true; builtin_optab = exp10_optab; break;
2050 CASE_FLT_FN (BUILT_IN_EXP2):
2051 errno_set = true; builtin_optab = exp2_optab; break;
2052 CASE_FLT_FN (BUILT_IN_EXPM1):
2053 errno_set = true; builtin_optab = expm1_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOGB):
2055 errno_set = true; builtin_optab = logb_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG):
2057 errno_set = true; builtin_optab = log_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOG10):
2059 errno_set = true; builtin_optab = log10_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOG2):
2061 errno_set = true; builtin_optab = log2_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG1P):
2063 errno_set = true; builtin_optab = log1p_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ASIN):
2065 builtin_optab = asin_optab; break;
2066 CASE_FLT_FN (BUILT_IN_ACOS):
2067 builtin_optab = acos_optab; break;
2068 CASE_FLT_FN (BUILT_IN_TAN):
2069 builtin_optab = tan_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ATAN):
2071 builtin_optab = atan_optab; break;
2072 CASE_FLT_FN (BUILT_IN_FLOOR):
2073 builtin_optab = floor_optab; break;
2074 CASE_FLT_FN (BUILT_IN_CEIL):
2075 builtin_optab = ceil_optab; break;
2076 CASE_FLT_FN (BUILT_IN_TRUNC):
2077 builtin_optab = btrunc_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ROUND):
2079 builtin_optab = round_optab; break;
2080 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2081 builtin_optab = nearbyint_optab;
2082 if (flag_trapping_math)
2083 break;
2084 /* Otherwise fall through and expand as rint. */
2085 CASE_FLT_FN (BUILT_IN_RINT):
2086 builtin_optab = rint_optab; break;
2087 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2088 builtin_optab = significand_optab; break;
2089 default:
2090 gcc_unreachable ();
2091 }
2092
2093 /* Make a suitable register to place result in. */
2094 mode = TYPE_MODE (TREE_TYPE (exp));
2095
2096 if (! flag_errno_math || ! HONOR_NANS (mode))
2097 errno_set = false;
2098
2099 /* Before working hard, check whether the instruction is available, but try
2100 to widen the mode for specific operations. */
2101 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2102 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2103 && (!errno_set || !optimize_insn_for_size_p ()))
2104 {
2105 rtx result = gen_reg_rtx (mode);
2106
2107 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2108 need to expand the argument again. This way, we will not perform
2109 side-effects more than once. */
2110 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2111
2112 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2113
2114 start_sequence ();
2115
2116 /* Compute into RESULT.
2117 Set RESULT to wherever the result comes back. */
2118 result = expand_unop (mode, builtin_optab, op0, result, 0);
2119
2120 if (result != 0)
2121 {
2122 if (errno_set)
2123 expand_errno_check (exp, result);
2124
2125 /* Output the entire sequence. */
2126 insns = get_insns ();
2127 end_sequence ();
2128 emit_insn (insns);
2129 return result;
2130 }
2131
2132 /* If we were unable to expand via the builtin, stop the sequence
2133 (without outputting the insns) and call to the library function
2134 with the stabilized argument list. */
2135 end_sequence ();
2136 }
2137
2138 return expand_call (exp, target, target == const0_rtx);
2139 }
2140
2141 /* Expand a call to the builtin binary math functions (pow, atan2, fmod, etc.).
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2146 operands. */
2147
2148 static rtx
2149 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2150 {
2151 optab builtin_optab;
2152 rtx op0, op1, result;
2153 rtx_insn *insns;
2154 int op1_type = REAL_TYPE;
2155 tree fndecl = get_callee_fndecl (exp);
2156 tree arg0, arg1;
2157 machine_mode mode;
2158 bool errno_set = true;
2159
2160 switch (DECL_FUNCTION_CODE (fndecl))
2161 {
2162 CASE_FLT_FN (BUILT_IN_SCALBN):
2163 CASE_FLT_FN (BUILT_IN_SCALBLN):
2164 CASE_FLT_FN (BUILT_IN_LDEXP):
2165 op1_type = INTEGER_TYPE;
2166 default:
2167 break;
2168 }
2169
2170 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2171 return NULL_RTX;
2172
2173 arg0 = CALL_EXPR_ARG (exp, 0);
2174 arg1 = CALL_EXPR_ARG (exp, 1);
2175
2176 switch (DECL_FUNCTION_CODE (fndecl))
2177 {
2178 CASE_FLT_FN (BUILT_IN_POW):
2179 builtin_optab = pow_optab; break;
2180 CASE_FLT_FN (BUILT_IN_ATAN2):
2181 builtin_optab = atan2_optab; break;
2182 CASE_FLT_FN (BUILT_IN_SCALB):
2183 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2184 return 0;
2185 builtin_optab = scalb_optab; break;
2186 CASE_FLT_FN (BUILT_IN_SCALBN):
2187 CASE_FLT_FN (BUILT_IN_SCALBLN):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2189 return 0;
2190 /* Fall through... */
2191 CASE_FLT_FN (BUILT_IN_LDEXP):
2192 builtin_optab = ldexp_optab; break;
2193 CASE_FLT_FN (BUILT_IN_FMOD):
2194 builtin_optab = fmod_optab; break;
2195 CASE_FLT_FN (BUILT_IN_REMAINDER):
2196 CASE_FLT_FN (BUILT_IN_DREM):
2197 builtin_optab = remainder_optab; break;
2198 default:
2199 gcc_unreachable ();
2200 }
2201
2202 /* Make a suitable register to place result in. */
2203 mode = TYPE_MODE (TREE_TYPE (exp));
2204
2205 /* Before working hard, check whether the instruction is available. */
2206 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2207 return NULL_RTX;
2208
2209 result = gen_reg_rtx (mode);
2210
2211 if (! flag_errno_math || ! HONOR_NANS (mode))
2212 errno_set = false;
2213
2214 if (errno_set && optimize_insn_for_size_p ())
2215 return 0;
2216
2217 /* Always stabilize the argument list. */
2218 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2219 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2220
2221 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2222 op1 = expand_normal (arg1);
2223
2224 start_sequence ();
2225
2226 /* Compute into RESULT.
2227 Set RESULT to wherever the result comes back. */
2228 result = expand_binop (mode, builtin_optab, op0, op1,
2229 result, 0, OPTAB_DIRECT);
2230
2231 /* If we were unable to expand via the builtin, stop the sequence
2232 (without outputting the insns) and call to the library function
2233 with the stabilized argument list. */
2234 if (result == 0)
2235 {
2236 end_sequence ();
2237 return expand_call (exp, target, target == const0_rtx);
2238 }
2239
2240 if (errno_set)
2241 expand_errno_check (exp, result);
2242
2243 /* Output the entire sequence. */
2244 insns = get_insns ();
2245 end_sequence ();
2246 emit_insn (insns);
2247
2248 return result;
2249 }
2250
2251 /* Expand a call to the builtin ternary math functions (fma).
2252 Return NULL_RTX if a normal call should be emitted rather than expanding the
2253 function in-line. EXP is the expression that is a call to the builtin
2254 function; if convenient, the result should be placed in TARGET.
2255 SUBTARGET may be used as the target for computing one of EXP's
2256 operands. */
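/* fma (x, y, z) computes x*y + z with a single rounding step; for
   example (illustrative), fma (a, b, -a*b) recovers the rounding error
   of the product a*b. The expansion below simply tries fma_optab and
   otherwise falls back to the library call.  */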
2257
2258 static rtx
2259 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2260 {
2261 optab builtin_optab;
2262 rtx op0, op1, op2, result;
2263 rtx_insn *insns;
2264 tree fndecl = get_callee_fndecl (exp);
2265 tree arg0, arg1, arg2;
2266 machine_mode mode;
2267
2268 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2269 return NULL_RTX;
2270
2271 arg0 = CALL_EXPR_ARG (exp, 0);
2272 arg1 = CALL_EXPR_ARG (exp, 1);
2273 arg2 = CALL_EXPR_ARG (exp, 2);
2274
2275 switch (DECL_FUNCTION_CODE (fndecl))
2276 {
2277 CASE_FLT_FN (BUILT_IN_FMA):
2278 builtin_optab = fma_optab; break;
2279 default:
2280 gcc_unreachable ();
2281 }
2282
2283 /* Make a suitable register to place result in. */
2284 mode = TYPE_MODE (TREE_TYPE (exp));
2285
2286 /* Before working hard, check whether the instruction is available. */
2287 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2288 return NULL_RTX;
2289
2290 result = gen_reg_rtx (mode);
2291
2292 /* Always stabilize the argument list. */
2293 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2294 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2295 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2296
2297 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2298 op1 = expand_normal (arg1);
2299 op2 = expand_normal (arg2);
2300
2301 start_sequence ();
2302
2303 /* Compute into RESULT.
2304 Set RESULT to wherever the result comes back. */
2305 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2306 result, 0);
2307
2308 /* If we were unable to expand via the builtin, stop the sequence
2309 (without outputting the insns) and call to the library function
2310 with the stabilized argument list. */
2311 if (result == 0)
2312 {
2313 end_sequence ();
2314 return expand_call (exp, target, target == const0_rtx);
2315 }
2316
2317 /* Output the entire sequence. */
2318 insns = get_insns ();
2319 end_sequence ();
2320 emit_insn (insns);
2321
2322 return result;
2323 }
2324
2325 /* Expand a call to the builtin sin and cos math functions.
2326 Return NULL_RTX if a normal call should be emitted rather than expanding the
2327 function in-line. EXP is the expression that is a call to the builtin
2328 function; if convenient, the result should be placed in TARGET.
2329 SUBTARGET may be used as the target for computing one of EXP's
2330 operands. */
2331
2332 static rtx
2333 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2334 {
2335 optab builtin_optab;
2336 rtx op0;
2337 rtx_insn *insns;
2338 tree fndecl = get_callee_fndecl (exp);
2339 machine_mode mode;
2340 tree arg;
2341
2342 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2343 return NULL_RTX;
2344
2345 arg = CALL_EXPR_ARG (exp, 0);
2346
2347 switch (DECL_FUNCTION_CODE (fndecl))
2348 {
2349 CASE_FLT_FN (BUILT_IN_SIN):
2350 CASE_FLT_FN (BUILT_IN_COS):
2351 builtin_optab = sincos_optab; break;
2352 default:
2353 gcc_unreachable ();
2354 }
2355
2356 /* Make a suitable register to place result in. */
2357 mode = TYPE_MODE (TREE_TYPE (exp));
2358
2359 /* Check if sincos insn is available, otherwise fall back
2360 to sin or cos insn. */
2361 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2362 switch (DECL_FUNCTION_CODE (fndecl))
2363 {
2364 CASE_FLT_FN (BUILT_IN_SIN):
2365 builtin_optab = sin_optab; break;
2366 CASE_FLT_FN (BUILT_IN_COS):
2367 builtin_optab = cos_optab; break;
2368 default:
2369 gcc_unreachable ();
2370 }
2371
2372 /* Before working hard, check whether the instruction is available. */
2373 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2374 {
2375 rtx result = gen_reg_rtx (mode);
2376
2377 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2378 need to expand the argument again. This way, we will not perform
2379 side-effects more than once. */
2380 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2381
2382 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383
2384 start_sequence ();
2385
2386 /* Compute into RESULT.
2387 Set RESULT to wherever the result comes back. */
2388 if (builtin_optab == sincos_optab)
2389 {
2390 int ok;
2391
2392 switch (DECL_FUNCTION_CODE (fndecl))
2393 {
2394 CASE_FLT_FN (BUILT_IN_SIN):
2395 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2396 break;
2397 CASE_FLT_FN (BUILT_IN_COS):
2398 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2399 break;
2400 default:
2401 gcc_unreachable ();
2402 }
2403 gcc_assert (ok);
2404 }
2405 else
2406 result = expand_unop (mode, builtin_optab, op0, result, 0);
2407
2408 if (result != 0)
2409 {
2410 /* Output the entire sequence. */
2411 insns = get_insns ();
2412 end_sequence ();
2413 emit_insn (insns);
2414 return result;
2415 }
2416
2417 /* If we were unable to expand via the builtin, stop the sequence
2418 (without outputting the insns) and call to the library function
2419 with the stabilized argument list. */
2420 end_sequence ();
2421 }
2422
2423 return expand_call (exp, target, target == const0_rtx);
2424 }
2425
2426 /* Given an interclass math builtin decl FNDECL and its argument ARG
2427 return an RTL instruction code that implements the functionality.
2428 If that isn't possible or available return CODE_FOR_nothing. */
2429
2430 static enum insn_code
2431 interclass_mathfn_icode (tree arg, tree fndecl)
2432 {
2433 bool errno_set = false;
2434 optab builtin_optab = unknown_optab;
2435 machine_mode mode;
2436
2437 switch (DECL_FUNCTION_CODE (fndecl))
2438 {
2439 CASE_FLT_FN (BUILT_IN_ILOGB):
2440 errno_set = true; builtin_optab = ilogb_optab; break;
2441 CASE_FLT_FN (BUILT_IN_ISINF):
2442 builtin_optab = isinf_optab; break;
2443 case BUILT_IN_ISNORMAL:
2444 case BUILT_IN_ISFINITE:
2445 CASE_FLT_FN (BUILT_IN_FINITE):
2446 case BUILT_IN_FINITED32:
2447 case BUILT_IN_FINITED64:
2448 case BUILT_IN_FINITED128:
2449 case BUILT_IN_ISINFD32:
2450 case BUILT_IN_ISINFD64:
2451 case BUILT_IN_ISINFD128:
2452 /* These builtins have no optabs (yet). */
2453 break;
2454 default:
2455 gcc_unreachable ();
2456 }
2457
2458 /* There's no easy way to detect the case we need to set EDOM. */
2459 if (flag_errno_math && errno_set)
2460 return CODE_FOR_nothing;
2461
2462 /* Optab mode depends on the mode of the input argument. */
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2464
2465 if (builtin_optab)
2466 return optab_handler (builtin_optab, mode);
2467 return CODE_FOR_nothing;
2468 }
2469
2470 /* Expand a call to one of the builtin math functions that operate on
2471 a floating point argument and output an integer result (ilogb, isinf,
2472 isnan, etc).
2473 Return 0 if a normal call should be emitted rather than expanding the
2474 function in-line. EXP is the expression that is a call to the builtin
2475 function; if convenient, the result should be placed in TARGET. */
2476
2477 static rtx
2478 expand_builtin_interclass_mathfn (tree exp, rtx target)
2479 {
2480 enum insn_code icode = CODE_FOR_nothing;
2481 rtx op0;
2482 tree fndecl = get_callee_fndecl (exp);
2483 machine_mode mode;
2484 tree arg;
2485
2486 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2487 return NULL_RTX;
2488
2489 arg = CALL_EXPR_ARG (exp, 0);
2490 icode = interclass_mathfn_icode (arg, fndecl);
2491 mode = TYPE_MODE (TREE_TYPE (arg));
2492
2493 if (icode != CODE_FOR_nothing)
2494 {
2495 struct expand_operand ops[1];
2496 rtx_insn *last = get_last_insn ();
2497 tree orig_arg = arg;
2498
2499 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2500 need to expand the argument again. This way, we will not perform
2501 side-effects more than once. */
2502 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2503
2504 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2505
2506 if (mode != GET_MODE (op0))
2507 op0 = convert_to_mode (mode, op0, 0);
2508
2509 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2510 if (maybe_legitimize_operands (icode, 0, 1, ops)
2511 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2512 return ops[0].value;
2513
2514 delete_insns_since (last);
2515 CALL_EXPR_ARG (exp, 0) = orig_arg;
2516 }
2517
2518 return NULL_RTX;
2519 }
2520
2521 /* Expand a call to the builtin sincos math function.
2522 Return NULL_RTX if a normal call should be emitted rather than expanding the
2523 function in-line. EXP is the expression that is a call to the builtin
2524 function. */
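/* Example (illustrative) of the GNU interface this expands:

       double s, c;
       sincos (x, &s, &c);      // s = sin (x), c = cos (x)  */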
2525
2526 static rtx
2527 expand_builtin_sincos (tree exp)
2528 {
2529 rtx op0, op1, op2, target1, target2;
2530 machine_mode mode;
2531 tree arg, sinp, cosp;
2532 int result;
2533 location_t loc = EXPR_LOCATION (exp);
2534 tree alias_type, alias_off;
2535
2536 if (!validate_arglist (exp, REAL_TYPE,
2537 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2538 return NULL_RTX;
2539
2540 arg = CALL_EXPR_ARG (exp, 0);
2541 sinp = CALL_EXPR_ARG (exp, 1);
2542 cosp = CALL_EXPR_ARG (exp, 2);
2543
2544 /* Make a suitable register to place result in. */
2545 mode = TYPE_MODE (TREE_TYPE (arg));
2546
2547 /* Check if sincos insn is available, otherwise emit the call. */
2548 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2549 return NULL_RTX;
2550
2551 target1 = gen_reg_rtx (mode);
2552 target2 = gen_reg_rtx (mode);
2553
2554 op0 = expand_normal (arg);
2555 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2556 alias_off = build_int_cst (alias_type, 0);
2557 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2558 sinp, alias_off));
2559 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 cosp, alias_off));
2561
2562 /* Compute into target1 and target2.
2563 Set TARGET to wherever the result comes back. */
2564 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2565 gcc_assert (result);
2566
2567 /* Move target1 and target2 to the memory locations indicated
2568 by op1 and op2. */
2569 emit_move_insn (op1, target1);
2570 emit_move_insn (op2, target2);
2571
2572 return const0_rtx;
2573 }
2574
2575 /* Expand a call to the internal cexpi builtin to the sincos math function.
2576 EXP is the expression that is a call to the builtin function; if convenient,
2577 the result should be placed in TARGET. */
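/* cexpi (x) computes cos (x) + i*sin (x), i.e. exp (i*x) by Euler's
   formula, which is why the expansion below may go through the sincos
   optab, a sincos libcall, or a call to cexp on the complex value
   0 + x*i.  */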
2578
2579 static rtx
2580 expand_builtin_cexpi (tree exp, rtx target)
2581 {
2582 tree fndecl = get_callee_fndecl (exp);
2583 tree arg, type;
2584 machine_mode mode;
2585 rtx op0, op1, op2;
2586 location_t loc = EXPR_LOCATION (exp);
2587
2588 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2589 return NULL_RTX;
2590
2591 arg = CALL_EXPR_ARG (exp, 0);
2592 type = TREE_TYPE (arg);
2593 mode = TYPE_MODE (TREE_TYPE (arg));
2594
2595 /* Try expanding via a sincos optab, fall back to emitting a libcall
2596 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2597 is only generated from sincos or cexp, or when either of them is available. */
2598 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2599 {
2600 op1 = gen_reg_rtx (mode);
2601 op2 = gen_reg_rtx (mode);
2602
2603 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2604
2605 /* Compute into op1 and op2. */
2606 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2607 }
2608 else if (targetm.libc_has_function (function_sincos))
2609 {
2610 tree call, fn = NULL_TREE;
2611 tree top1, top2;
2612 rtx op1a, op2a;
2613
2614 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2615 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2620 else
2621 gcc_unreachable ();
2622
2623 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2624 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2625 op1a = copy_addr_to_reg (XEXP (op1, 0));
2626 op2a = copy_addr_to_reg (XEXP (op2, 0));
2627 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2628 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2629
2630 /* Make sure not to fold the sincos call again. */
2631 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2632 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2633 call, 3, arg, top1, top2));
2634 }
2635 else
2636 {
2637 tree call, fn = NULL_TREE, narg;
2638 tree ctype = build_complex_type (type);
2639
2640 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2641 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2642 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2646 else
2647 gcc_unreachable ();
2648
2649 /* If we don't have a decl for cexp, create one. This is the
2650 friendliest fallback if the user calls __builtin_cexpi
2651 without full C99 function support on the target. */
2652 if (fn == NULL_TREE)
2653 {
2654 tree fntype;
2655 const char *name = NULL;
2656
2657 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2658 name = "cexpf";
2659 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2660 name = "cexp";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2662 name = "cexpl";
2663
2664 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2665 fn = build_fn_decl (name, fntype);
2666 }
2667
2668 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2669 build_real (type, dconst0), arg);
2670
2671 /* Make sure not to fold the cexp call again. */
2672 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2673 return expand_expr (build_call_nary (ctype, call, 1, narg),
2674 target, VOIDmode, EXPAND_NORMAL);
2675 }
2676
2677 /* Now build the proper return type. */
2678 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2679 make_tree (TREE_TYPE (arg), op2),
2680 make_tree (TREE_TYPE (arg), op1)),
2681 target, VOIDmode, EXPAND_NORMAL);
2682 }
2683
2684 /* Conveniently construct a function call expression. FNDECL names the
2685 function to be called, N is the number of arguments, and the "..."
2686 parameters are the argument expressions. Unlike build_call_expr
2687 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2688
2689 static tree
2690 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2691 {
2692 va_list ap;
2693 tree fntype = TREE_TYPE (fndecl);
2694 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2695
2696 va_start (ap, n);
2697 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2698 va_end (ap);
2699 SET_EXPR_LOCATION (fn, loc);
2700 return fn;
2701 }
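/* For example, the fallback paths later in this file use it as

       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                    fallback_fndecl, 1, arg);

   to build an unfolded CALL_EXPR invoking FALLBACK_FNDECL on ARG.  */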
2702
2703 /* Expand a call to one of the builtin rounding functions gcc defines
2704 as an extension (lfloor and lceil). As these are gcc extensions we
2705 do not need to worry about setting errno to EDOM.
2706 If expanding via optab fails, lower the expression to (int)(floor(x)).
2707 EXP is the expression that is a call to the builtin function;
2708 if convenient, the result should be placed in TARGET. */
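/* For example (illustrative), when lceil_optab has no handler,

       long l = __builtin_lceil (x);

   is lowered to roughly l = (long) ceil (x) by the fallback code
   below.  */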
2709
2710 static rtx
2711 expand_builtin_int_roundingfn (tree exp, rtx target)
2712 {
2713 convert_optab builtin_optab;
2714 rtx op0, tmp;
2715 rtx_insn *insns;
2716 tree fndecl = get_callee_fndecl (exp);
2717 enum built_in_function fallback_fn;
2718 tree fallback_fndecl;
2719 machine_mode mode;
2720 tree arg;
2721
2722 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2723 gcc_unreachable ();
2724
2725 arg = CALL_EXPR_ARG (exp, 0);
2726
2727 switch (DECL_FUNCTION_CODE (fndecl))
2728 {
2729 CASE_FLT_FN (BUILT_IN_ICEIL):
2730 CASE_FLT_FN (BUILT_IN_LCEIL):
2731 CASE_FLT_FN (BUILT_IN_LLCEIL):
2732 builtin_optab = lceil_optab;
2733 fallback_fn = BUILT_IN_CEIL;
2734 break;
2735
2736 CASE_FLT_FN (BUILT_IN_IFLOOR):
2737 CASE_FLT_FN (BUILT_IN_LFLOOR):
2738 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2739 builtin_optab = lfloor_optab;
2740 fallback_fn = BUILT_IN_FLOOR;
2741 break;
2742
2743 default:
2744 gcc_unreachable ();
2745 }
2746
2747 /* Make a suitable register to place result in. */
2748 mode = TYPE_MODE (TREE_TYPE (exp));
2749
2750 target = gen_reg_rtx (mode);
2751
2752 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2753 need to expand the argument again. This way, we will not perform
2754 side-effects more than once. */
2755 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2756
2757 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2758
2759 start_sequence ();
2760
2761 /* Compute into TARGET. */
2762 if (expand_sfix_optab (target, op0, builtin_optab))
2763 {
2764 /* Output the entire sequence. */
2765 insns = get_insns ();
2766 end_sequence ();
2767 emit_insn (insns);
2768 return target;
2769 }
2770
2771 /* If we were unable to expand via the builtin, stop the sequence
2772 (without outputting the insns). */
2773 end_sequence ();
2774
2775 /* Fall back to floating point rounding optab. */
2776 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2777
2778 /* For non-C99 targets we may end up without a fallback fndecl here
2779 if the user called __builtin_lfloor directly. In this case emit
2780 a call to the floor/ceil variants nevertheless. This should result
2781 in the best user experience for targets that do not fully support C99. */
2782 if (fallback_fndecl == NULL_TREE)
2783 {
2784 tree fntype;
2785 const char *name = NULL;
2786
2787 switch (DECL_FUNCTION_CODE (fndecl))
2788 {
2789 case BUILT_IN_ICEIL:
2790 case BUILT_IN_LCEIL:
2791 case BUILT_IN_LLCEIL:
2792 name = "ceil";
2793 break;
2794 case BUILT_IN_ICEILF:
2795 case BUILT_IN_LCEILF:
2796 case BUILT_IN_LLCEILF:
2797 name = "ceilf";
2798 break;
2799 case BUILT_IN_ICEILL:
2800 case BUILT_IN_LCEILL:
2801 case BUILT_IN_LLCEILL:
2802 name = "ceill";
2803 break;
2804 case BUILT_IN_IFLOOR:
2805 case BUILT_IN_LFLOOR:
2806 case BUILT_IN_LLFLOOR:
2807 name = "floor";
2808 break;
2809 case BUILT_IN_IFLOORF:
2810 case BUILT_IN_LFLOORF:
2811 case BUILT_IN_LLFLOORF:
2812 name = "floorf";
2813 break;
2814 case BUILT_IN_IFLOORL:
2815 case BUILT_IN_LFLOORL:
2816 case BUILT_IN_LLFLOORL:
2817 name = "floorl";
2818 break;
2819 default:
2820 gcc_unreachable ();
2821 }
2822
2823 fntype = build_function_type_list (TREE_TYPE (arg),
2824 TREE_TYPE (arg), NULL_TREE);
2825 fallback_fndecl = build_fn_decl (name, fntype);
2826 }
2827
2828 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2829
2830 tmp = expand_normal (exp);
2831 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2832
2833 /* Truncate the result of floating point optab to integer
2834 via expand_fix (). */
2835 target = gen_reg_rtx (mode);
2836 expand_fix (target, tmp, 0);
2837
2838 return target;
2839 }
2840
2841 /* Expand a call to one of the builtin math functions doing integer
2842 conversion (lrint).
2843 Return 0 if a normal call should be emitted rather than expanding the
2844 function in-line. EXP is the expression that is a call to the builtin
2845 function; if convenient, the result should be placed in TARGET. */
2846
2847 static rtx
2848 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2849 {
2850 convert_optab builtin_optab;
2851 rtx op0;
2852 rtx_insn *insns;
2853 tree fndecl = get_callee_fndecl (exp);
2854 tree arg;
2855 machine_mode mode;
2856 enum built_in_function fallback_fn = BUILT_IN_NONE;
2857
2858 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2859 gcc_unreachable ();
2860
2861 arg = CALL_EXPR_ARG (exp, 0);
2862
2863 switch (DECL_FUNCTION_CODE (fndecl))
2864 {
2865 CASE_FLT_FN (BUILT_IN_IRINT):
2866 fallback_fn = BUILT_IN_LRINT;
2867 /* FALLTHRU */
2868 CASE_FLT_FN (BUILT_IN_LRINT):
2869 CASE_FLT_FN (BUILT_IN_LLRINT):
2870 builtin_optab = lrint_optab;
2871 break;
2872
2873 CASE_FLT_FN (BUILT_IN_IROUND):
2874 fallback_fn = BUILT_IN_LROUND;
2875 /* FALLTHRU */
2876 CASE_FLT_FN (BUILT_IN_LROUND):
2877 CASE_FLT_FN (BUILT_IN_LLROUND):
2878 builtin_optab = lround_optab;
2879 break;
2880
2881 default:
2882 gcc_unreachable ();
2883 }
2884
2885 /* There's no easy way to detect the case we need to set EDOM. */
2886 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2887 return NULL_RTX;
2888
2889 /* Make a suitable register to place result in. */
2890 mode = TYPE_MODE (TREE_TYPE (exp));
2891
2892 /* Expand the call inline only when errno need not be maintained. */
2893 if (!flag_errno_math)
2894 {
2895 rtx result = gen_reg_rtx (mode);
2896
2897 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2898 need to expand the argument again. This way, we will not perform
2899 side-effects more than once. */
2900 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2901
2902 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2903
2904 start_sequence ();
2905
2906 if (expand_sfix_optab (result, op0, builtin_optab))
2907 {
2908 /* Output the entire sequence. */
2909 insns = get_insns ();
2910 end_sequence ();
2911 emit_insn (insns);
2912 return result;
2913 }
2914
2915 /* If we were unable to expand via the builtin, stop the sequence
2916 (without outputting the insns) and call to the library function
2917 with the stabilized argument list. */
2918 end_sequence ();
2919 }
2920
2921 if (fallback_fn != BUILT_IN_NONE)
2922 {
2923 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2924 targets, (int) round (x) should never be transformed into
2925 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2926 a call to lround in the hope that the target provides at least some
2927 C99 functions. This should result in the best user experience for
2928 targets that do not fully support C99. */
2929 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2930 fallback_fn, 0);
2931
2932 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2933 fallback_fndecl, 1, arg);
2934
2935 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2936 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2937 return convert_to_mode (mode, target, 0);
2938 }
2939
2940 return expand_call (exp, target, target == const0_rtx);
2941 }
2942
2943 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2944 a normal call should be emitted rather than expanding the function
2945 in-line. EXP is the expression that is a call to the builtin
2946 function; if convenient, the result should be placed in TARGET. */
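/* __builtin_powi raises a floating-point value to an integer power,
   e.g. (illustrative) __builtin_powi (x, 3) is x*x*x. The expansion
   below always emits a libcall to the libgcc helper found via
   powi_optab (__powidf2 for double, and similarly for other modes).  */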
2947
2948 static rtx
2949 expand_builtin_powi (tree exp, rtx target)
2950 {
2951 tree arg0, arg1;
2952 rtx op0, op1;
2953 machine_mode mode;
2954 machine_mode mode2;
2955
2956 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2957 return NULL_RTX;
2958
2959 arg0 = CALL_EXPR_ARG (exp, 0);
2960 arg1 = CALL_EXPR_ARG (exp, 1);
2961 mode = TYPE_MODE (TREE_TYPE (exp));
2962
2963 /* Emit a libcall to libgcc. */
2964
2965 /* Mode of the 2nd argument must match that of an int. */
2966 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2967
2968 if (target == NULL_RTX)
2969 target = gen_reg_rtx (mode);
2970
2971 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2972 if (GET_MODE (op0) != mode)
2973 op0 = convert_to_mode (mode, op0, 0);
2974 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2975 if (GET_MODE (op1) != mode2)
2976 op1 = convert_to_mode (mode2, op1, 0);
2977
2978 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2979 target, LCT_CONST, mode, 2,
2980 op0, mode, op1, mode2);
2981
2982 return target;
2983 }
2984
2985 /* Expand expression EXP which is a call to the strlen builtin. Return
2986 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2987 try to get the result in TARGET, if convenient. */
2988
2989 static rtx
2990 expand_builtin_strlen (tree exp, rtx target,
2991 machine_mode target_mode)
2992 {
2993 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2994 return NULL_RTX;
2995 else
2996 {
2997 struct expand_operand ops[4];
2998 rtx pat;
2999 tree len;
3000 tree src = CALL_EXPR_ARG (exp, 0);
3001 rtx src_reg;
3002 rtx_insn *before_strlen;
3003 machine_mode insn_mode = target_mode;
3004 enum insn_code icode = CODE_FOR_nothing;
3005 unsigned int align;
3006
3007 /* If the length can be computed at compile-time, return it. */
3008 len = c_strlen (src, 0);
3009 if (len)
3010 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011
3012 /* If the length can be computed at compile-time and is a constant
3013 integer, but there are side-effects in src, evaluate
3014 src for side-effects, then return len.
3015 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3016 can be optimized into: i++; x = 3; */
3017 len = c_strlen (src, 1);
3018 if (len && TREE_CODE (len) == INTEGER_CST)
3019 {
3020 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3022 }
3023
3024 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3025
3026 /* If SRC is not a pointer type, don't do this operation inline. */
3027 if (align == 0)
3028 return NULL_RTX;
3029
3030 /* Bail out if we can't compute strlen in the right mode. */
3031 while (insn_mode != VOIDmode)
3032 {
3033 icode = optab_handler (strlen_optab, insn_mode);
3034 if (icode != CODE_FOR_nothing)
3035 break;
3036
3037 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3038 }
3039 if (insn_mode == VOIDmode)
3040 return NULL_RTX;
3041
3042 /* Make a place to hold the source address. We will not expand
3043 the actual source until we are sure that the expansion will
3044 not fail -- there are trees that cannot be expanded twice. */
3045 src_reg = gen_reg_rtx (Pmode);
3046
3047 /* Mark the beginning of the strlen sequence so we can emit the
3048 source operand later. */
3049 before_strlen = get_last_insn ();
3050
3051 create_output_operand (&ops[0], target, insn_mode);
3052 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3053 create_integer_operand (&ops[2], 0);
3054 create_integer_operand (&ops[3], align);
3055 if (!maybe_expand_insn (icode, 4, ops))
3056 return NULL_RTX;
3057
3058 /* Now that we are assured of success, expand the source. */
3059 start_sequence ();
3060 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3061 if (pat != src_reg)
3062 {
3063 #ifdef POINTERS_EXTEND_UNSIGNED
3064 if (GET_MODE (pat) != Pmode)
3065 pat = convert_to_mode (Pmode, pat,
3066 POINTERS_EXTEND_UNSIGNED);
3067 #endif
3068 emit_move_insn (src_reg, pat);
3069 }
3070 pat = get_insns ();
3071 end_sequence ();
3072
3073 if (before_strlen)
3074 emit_insn_after (pat, before_strlen);
3075 else
3076 emit_insn_before (pat, get_insns ());
3077
3078 /* Return the value in the proper mode for this function. */
3079 if (GET_MODE (ops[0].value) == target_mode)
3080 target = ops[0].value;
3081 else if (target != 0)
3082 convert_move (target, ops[0].value, 0);
3083 else
3084 target = convert_to_mode (target_mode, ops[0].value, 0);
3085
3086 return target;
3087 }
3088 }
3089
3090 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3091 bytes from constant string DATA + OFFSET and return it as target
3092 constant. */
3093
3094 static rtx
3095 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3096 machine_mode mode)
3097 {
3098 const char *str = (const char *) data;
3099
3100 gcc_assert (offset >= 0
3101 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3102 <= strlen (str) + 1));
3103
3104 return c_readstr (str + offset, mode);
3105 }
3106
3107 /* LEN specifies the length of the block in a memcpy/memset operation.
3108 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3109 In some cases we can make a very likely guess about the maximum size,
3110 which we then store into PROBABLE_MAX_SIZE. */
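/* For example (illustrative): if LEN is an SSA name whose recorded
   value range is [4, 64], this sets *MIN_SIZE = 4 and *MAX_SIZE =
   *PROBABLE_MAX_SIZE = 64; a constant LEN_RTX sets all three to that
   constant.  */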
3111
3112 static void
3113 determine_block_size (tree len, rtx len_rtx,
3114 unsigned HOST_WIDE_INT *min_size,
3115 unsigned HOST_WIDE_INT *max_size,
3116 unsigned HOST_WIDE_INT *probable_max_size)
3117 {
3118 if (CONST_INT_P (len_rtx))
3119 {
3120 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3121 return;
3122 }
3123 else
3124 {
3125 wide_int min, max;
3126 enum value_range_type range_type = VR_UNDEFINED;
3127
3128 /* Determine bounds from the type. */
3129 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3130 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3131 else
3132 *min_size = 0;
3133 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3134 *probable_max_size = *max_size
3135 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3136 else
3137 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3138
3139 if (TREE_CODE (len) == SSA_NAME)
3140 range_type = get_range_info (len, &min, &max);
3141 if (range_type == VR_RANGE)
3142 {
3143 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3144 *min_size = min.to_uhwi ();
3145 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3146 *probable_max_size = *max_size = max.to_uhwi ();
3147 }
3148 else if (range_type == VR_ANTI_RANGE)
3149 {
3150 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3151 if (min == 0)
3152 {
3153 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3154 *min_size = max.to_uhwi () + 1;
3155 }
3156 /* Code like
3157
3158 int n;
3159 if (n < 100)
3160 memcpy (a, b, n)
3161
3162 produces an anti-range that allows negative values of N. We can
3163 still use this information to guess that N is not negative.
3164 */
3165 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3166 *probable_max_size = min.to_uhwi () - 1;
3167 }
3168 }
3169 gcc_checking_assert (*max_size <=
3170 (unsigned HOST_WIDE_INT)
3171 GET_MODE_MASK (GET_MODE (len_rtx)));
3172 }
3173
3174 /* Expand a call EXP to the memcpy builtin.
3175 Return NULL_RTX if we failed; the caller should emit a normal call,
3176 otherwise try to get the result in TARGET, if convenient (and in
3177 mode MODE if that's convenient). */
3178
3179 static rtx
3180 expand_builtin_memcpy (tree exp, rtx target)
3181 {
3182 if (!validate_arglist (exp,
3183 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3184 return NULL_RTX;
3185 else
3186 {
3187 tree dest = CALL_EXPR_ARG (exp, 0);
3188 tree src = CALL_EXPR_ARG (exp, 1);
3189 tree len = CALL_EXPR_ARG (exp, 2);
3190 const char *src_str;
3191 unsigned int src_align = get_pointer_alignment (src);
3192 unsigned int dest_align = get_pointer_alignment (dest);
3193 rtx dest_mem, src_mem, dest_addr, len_rtx;
3194 HOST_WIDE_INT expected_size = -1;
3195 unsigned int expected_align = 0;
3196 unsigned HOST_WIDE_INT min_size;
3197 unsigned HOST_WIDE_INT max_size;
3198 unsigned HOST_WIDE_INT probable_max_size;
3199
3200 /* If DEST is not a pointer type, call the normal function. */
3201 if (dest_align == 0)
3202 return NULL_RTX;
3203
3204 /* If SRC is not a pointer type, don't do this
3205 operation in-line. */
3206 if (src_align == 0)
3207 return NULL_RTX;
3208
3209 if (currently_expanding_gimple_stmt)
3210 stringop_block_profile (currently_expanding_gimple_stmt,
3211 &expected_align, &expected_size);
3212
3213 if (expected_align < dest_align)
3214 expected_align = dest_align;
3215 dest_mem = get_memory_rtx (dest, len);
3216 set_mem_align (dest_mem, dest_align);
3217 len_rtx = expand_normal (len);
3218 determine_block_size (len, len_rtx, &min_size, &max_size,
3219 &probable_max_size);
3220 src_str = c_getstr (src);
3221
3222 /* If SRC is a string constant and block move would be done
3223 by pieces, we can avoid loading the string from memory
3224 and instead store only the computed constants. */
3225 if (src_str
3226 && CONST_INT_P (len_rtx)
3227 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3228 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3229 CONST_CAST (char *, src_str),
3230 dest_align, false))
3231 {
3232 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3233 builtin_memcpy_read_str,
3234 CONST_CAST (char *, src_str),
3235 dest_align, false, 0);
3236 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3237 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3238 return dest_mem;
3239 }
3240
3241 src_mem = get_memory_rtx (src, len);
3242 set_mem_align (src_mem, src_align);
3243
3244 /* Copy word part most expediently. */
3245 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3246 CALL_EXPR_TAILCALL (exp)
3247 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3248 expected_align, expected_size,
3249 min_size, max_size, probable_max_size);
3250
3251 if (dest_addr == 0)
3252 {
3253 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3254 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3255 }
3256 return dest_addr;
3257 }
3258 }
3259
3260 /* Expand a call EXP to the mempcpy builtin.
3261 Return NULL_RTX if we failed; the caller should emit a normal call,
3262 otherwise try to get the result in TARGET, if convenient (and in
3263 mode MODE if that's convenient). If ENDP is 0 return the
3264 destination pointer, if ENDP is 1 return the end pointer ala
3265 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3266 stpcpy. */
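/* Concretely, for a copy of LEN bytes to DEST: ENDP == 0 yields DEST
   (memcpy), ENDP == 1 yields DEST + LEN (mempcpy), and ENDP == 2
   yields DEST + LEN - 1 (stpcpy, pointing at the copied NUL).  */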
3267
3268 static rtx
3269 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3270 {
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3273 return NULL_RTX;
3274 else
3275 {
3276 tree dest = CALL_EXPR_ARG (exp, 0);
3277 tree src = CALL_EXPR_ARG (exp, 1);
3278 tree len = CALL_EXPR_ARG (exp, 2);
3279 return expand_builtin_mempcpy_args (dest, src, len,
3280 target, mode, /*endp=*/ 1);
3281 }
3282 }
3283
3284 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3285 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3286 so that this can also be called without constructing an actual CALL_EXPR.
3287 The other arguments and return value are the same as for
3288 expand_builtin_mempcpy. */
3289
3290 static rtx
3291 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3292 rtx target, machine_mode mode, int endp)
3293 {
3294 /* If the return value is ignored, transform mempcpy into memcpy. */
3295 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3296 {
3297 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3298 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3299 dest, src, len);
3300 return expand_expr (result, target, mode, EXPAND_NORMAL);
3301 }
3302 else
3303 {
3304 const char *src_str;
3305 unsigned int src_align = get_pointer_alignment (src);
3306 unsigned int dest_align = get_pointer_alignment (dest);
3307 rtx dest_mem, src_mem, len_rtx;
3308
3309 /* If either SRC or DEST is not a pointer type, don't do this
3310 operation in-line. */
3311 if (dest_align == 0 || src_align == 0)
3312 return NULL_RTX;
3313
3314 /* If LEN is not constant, call the normal function. */
3315 if (! tree_fits_uhwi_p (len))
3316 return NULL_RTX;
3317
3318 len_rtx = expand_normal (len);
3319 src_str = c_getstr (src);
3320
3321 /* If SRC is a string constant and block move would be done
3322 by pieces, we can avoid loading the string from memory
3323 and instead store only the computed constants. */
3324 if (src_str
3325 && CONST_INT_P (len_rtx)
3326 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3327 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3329 dest_align, false))
3330 {
3331 dest_mem = get_memory_rtx (dest, len);
3332 set_mem_align (dest_mem, dest_align);
3333 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3334 builtin_memcpy_read_str,
3335 CONST_CAST (char *, src_str),
3336 dest_align, false, endp);
3337 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3338 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3339 return dest_mem;
3340 }
3341
3342 if (CONST_INT_P (len_rtx)
3343 && can_move_by_pieces (INTVAL (len_rtx),
3344 MIN (dest_align, src_align)))
3345 {
3346 dest_mem = get_memory_rtx (dest, len);
3347 set_mem_align (dest_mem, dest_align);
3348 src_mem = get_memory_rtx (src, len);
3349 set_mem_align (src_mem, src_align);
3350 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3351 MIN (dest_align, src_align), endp);
3352 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3353 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3354 return dest_mem;
3355 }
3356
3357 return NULL_RTX;
3358 }
3359 }
3360
3361 #ifndef HAVE_movstr
3362 # define HAVE_movstr 0
3363 # define CODE_FOR_movstr CODE_FOR_nothing
3364 #endif
3365
3366 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3367 we failed; the caller should emit a normal call, otherwise try to
3368 get the result in TARGET, if convenient. If ENDP is 0 return the
3369 destination pointer, if ENDP is 1 return the end pointer ala
3370 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3371 stpcpy. */
3372
3373 static rtx
3374 expand_movstr (tree dest, tree src, rtx target, int endp)
3375 {
3376 struct expand_operand ops[3];
3377 rtx dest_mem;
3378 rtx src_mem;
3379
3380 if (!HAVE_movstr)
3381 return NULL_RTX;
3382
3383 dest_mem = get_memory_rtx (dest, NULL);
3384 src_mem = get_memory_rtx (src, NULL);
3385 if (!endp)
3386 {
3387 target = force_reg (Pmode, XEXP (dest_mem, 0));
3388 dest_mem = replace_equiv_address (dest_mem, target);
3389 }
3390
3391 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3392 create_fixed_operand (&ops[1], dest_mem);
3393 create_fixed_operand (&ops[2], src_mem);
3394 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3395 return NULL_RTX;
3396
3397 if (endp && target != const0_rtx)
3398 {
3399 target = ops[0].value;
3400 /* movstr is supposed to set end to the address of the NUL
3401 terminator. If the caller requested a mempcpy-like return value,
3402 adjust it. */
3403 if (endp == 1)
3404 {
3405 rtx tem = plus_constant (GET_MODE (target),
3406 gen_lowpart (GET_MODE (target), target), 1);
3407 emit_move_insn (target, force_operand (tem, NULL_RTX));
3408 }
3409 }
3410 return target;
3411 }
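
/* Editorial sketch: the ENDP == 1 adjustment above is needed because a
   movstr pattern is defined to yield the address of the copied NUL,
   while mempcpy must return one past it.  Hypothetical demonstration,
   compiled out.  */
#if 0
static void
movstr_adjust_demo (void)
{
  char dst[8];
  char *nul = __builtin_stpcpy (dst, "abc"); /* what movstr leaves: &dst[3].  */
  void *end = nul + 1;                       /* what mempcpy returns: &dst[4].  */
  (void) end;
}
#endif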
3412
3413 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3414 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3415 try to get the result in TARGET, if convenient (and in mode MODE if that's
3416 convenient). */
3417
3418 static rtx
3419 expand_builtin_strcpy (tree exp, rtx target)
3420 {
3421 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3422 {
3423 tree dest = CALL_EXPR_ARG (exp, 0);
3424 tree src = CALL_EXPR_ARG (exp, 1);
3425 return expand_builtin_strcpy_args (dest, src, target);
3426 }
3427 return NULL_RTX;
3428 }
3429
3430 /* Helper function to do the actual work for expand_builtin_strcpy. The
3431 arguments to the builtin_strcpy call DEST and SRC are broken out
3432 so that this can also be called without constructing an actual CALL_EXPR.
3433 The other arguments and return value are the same as for
3434 expand_builtin_strcpy. */
3435
3436 static rtx
3437 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3438 {
3439 return expand_movstr (dest, src, target, /*endp=*/0);
3440 }
3441
3442 /* Expand a call EXP to the stpcpy builtin.
3443 Return NULL_RTX if we failed; the caller should emit a normal call,
3444 otherwise try to get the result in TARGET, if convenient (and in
3445 mode MODE if that's convenient). */
3446
3447 static rtx
3448 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3449 {
3450 tree dst, src;
3451 location_t loc = EXPR_LOCATION (exp);
3452
3453 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3454 return NULL_RTX;
3455
3456 dst = CALL_EXPR_ARG (exp, 0);
3457 src = CALL_EXPR_ARG (exp, 1);
3458
3459 /* If the return value is ignored, transform stpcpy into strcpy. */
3460 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3461 {
3462 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3463 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3464 return expand_expr (result, target, mode, EXPAND_NORMAL);
3465 }
3466 else
3467 {
3468 tree len, lenp1;
3469 rtx ret;
3470
3471 /* Ensure we get an actual string whose length can be evaluated at
3472 compile-time, not an expression containing a string. This is
3473 because the latter will potentially produce pessimized code
3474 when used to produce the return value. */
3475 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3476 return expand_movstr (dst, src, target, /*endp=*/2);
3477
3478 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3479 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3480 target, mode, /*endp=*/2);
3481
3482 if (ret)
3483 return ret;
3484
3485 if (TREE_CODE (len) == INTEGER_CST)
3486 {
3487 rtx len_rtx = expand_normal (len);
3488
3489 if (CONST_INT_P (len_rtx))
3490 {
3491 ret = expand_builtin_strcpy_args (dst, src, target);
3492
3493 if (ret)
3494 {
3495 if (! target)
3496 {
3497 if (mode != VOIDmode)
3498 target = gen_reg_rtx (mode);
3499 else
3500 target = gen_reg_rtx (GET_MODE (ret));
3501 }
3502 if (GET_MODE (target) != GET_MODE (ret))
3503 ret = gen_lowpart (GET_MODE (target), ret);
3504
3505 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3506 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3507 gcc_assert (ret);
3508
3509 return target;
3510 }
3511 }
3512 }
3513
3514 return expand_movstr (dst, src, target, /*endp=*/2);
3515 }
3516 }
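
/* Editorial sketch: for a source string of known constant length, the
   transformation above amounts to the identity below (hypothetical,
   compiled out).  */
#if 0
static char *
stpcpy_demo (char *d)
{
  /* stpcpy (d, "abc") == (char *) mempcpy (d, "abc", 3 + 1) - 1 == d + 3.  */
  return (char *) __builtin_mempcpy (d, "abc", 4) - 1;
}
#endif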
3517
3518 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3519 bytes from constant string DATA + OFFSET and return it as target
3520 constant. */
3521
3522 rtx
3523 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3524 machine_mode mode)
3525 {
3526 const char *str = (const char *) data;
3527
3528 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3529 return const0_rtx;
3530
3531 return c_readstr (str + offset, mode);
3532 }
3533
3534 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3535 NULL_RTX if we failed; the caller should emit a normal call. */
3536
3537 static rtx
3538 expand_builtin_strncpy (tree exp, rtx target)
3539 {
3540 location_t loc = EXPR_LOCATION (exp);
3541
3542 if (validate_arglist (exp,
3543 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3544 {
3545 tree dest = CALL_EXPR_ARG (exp, 0);
3546 tree src = CALL_EXPR_ARG (exp, 1);
3547 tree len = CALL_EXPR_ARG (exp, 2);
3548 tree slen = c_strlen (src, 1);
3549
3550 /* We must be passed a constant len and src parameter. */
3551 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3552 return NULL_RTX;
3553
3554 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3555
3556 /* We're required to pad with trailing zeros if the requested
3557 len is greater than strlen(s2)+1. In that case try to
3558 use store_by_pieces; if it fails, punt.
3559 if (tree_int_cst_lt (slen, len))
3560 {
3561 unsigned int dest_align = get_pointer_alignment (dest);
3562 const char *p = c_getstr (src);
3563 rtx dest_mem;
3564
3565 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3566 || !can_store_by_pieces (tree_to_uhwi (len),
3567 builtin_strncpy_read_str,
3568 CONST_CAST (char *, p),
3569 dest_align, false))
3570 return NULL_RTX;
3571
3572 dest_mem = get_memory_rtx (dest, len);
3573 store_by_pieces (dest_mem, tree_to_uhwi (len),
3574 builtin_strncpy_read_str,
3575 CONST_CAST (char *, p), dest_align, false, 0);
3576 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3577 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3578 return dest_mem;
3579 }
3580 }
3581 return NULL_RTX;
3582 }
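
/* Editorial sketch: the zero padding handled above is the standard
   strncpy contract; builtin_strncpy_read_str supplies the fill bytes by
   returning const0_rtx for offsets past the terminator.  Hypothetical
   demonstration, compiled out.  */
#if 0
static void
strncpy_pad_demo (void)
{
  char buf[8];
  __builtin_strncpy (buf, "ab", 8); /* buf == { 'a', 'b', 0, 0, 0, 0, 0, 0 }.  */
}
#endif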
3583
3584 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3585 bytes from constant string DATA + OFFSET and return it as target
3586 constant. */
3587
3588 rtx
3589 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3590 machine_mode mode)
3591 {
3592 const char *c = (const char *) data;
3593 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3594
3595 memset (p, *c, GET_MODE_SIZE (mode));
3596
3597 return c_readstr (p, mode);
3598 }
3599
3600 /* Callback routine for store_by_pieces. Return the RTL of a register
3601 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3602 char value given in the RTL register data. For example, if mode is
3603 4 bytes wide, return the RTL for 0x01010101*data. */
3604
3605 static rtx
3606 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3607 machine_mode mode)
3608 {
3609 rtx target, coeff;
3610 size_t size;
3611 char *p;
3612
3613 size = GET_MODE_SIZE (mode);
3614 if (size == 1)
3615 return (rtx) data;
3616
3617 p = XALLOCAVEC (char, size);
3618 memset (p, 1, size);
3619 coeff = c_readstr (p, mode);
3620
3621 target = convert_to_mode (mode, (rtx) data, 1);
3622 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3623 return force_reg (mode, target);
3624 }
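
/* Editorial sketch: for a 4-byte mode the multiplication above computes
   0x01010101 * c, replicating the byte across the word (hypothetical,
   compiled out).  */
#if 0
static unsigned int
memset_coeff_demo (unsigned char c)
{
  return 0x01010101u * c; /* c == 0xab yields 0xabababab.  */
}
#endif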
3625
3626 /* Expand expression EXP, which is a call to the memset builtin. Return
3627 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3628 try to get the result in TARGET, if convenient (and in mode MODE if that's
3629 convenient). */
3630
3631 static rtx
3632 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3633 {
3634 if (!validate_arglist (exp,
3635 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636 return NULL_RTX;
3637 else
3638 {
3639 tree dest = CALL_EXPR_ARG (exp, 0);
3640 tree val = CALL_EXPR_ARG (exp, 1);
3641 tree len = CALL_EXPR_ARG (exp, 2);
3642 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3643 }
3644 }
3645
3646 /* Helper function to do the actual work for expand_builtin_memset. The
3647 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3648 so that this can also be called without constructing an actual CALL_EXPR.
3649 The other arguments and return value are the same as for
3650 expand_builtin_memset. */
3651
3652 static rtx
3653 expand_builtin_memset_args (tree dest, tree val, tree len,
3654 rtx target, machine_mode mode, tree orig_exp)
3655 {
3656 tree fndecl, fn;
3657 enum built_in_function fcode;
3658 machine_mode val_mode;
3659 char c;
3660 unsigned int dest_align;
3661 rtx dest_mem, dest_addr, len_rtx;
3662 HOST_WIDE_INT expected_size = -1;
3663 unsigned int expected_align = 0;
3664 unsigned HOST_WIDE_INT min_size;
3665 unsigned HOST_WIDE_INT max_size;
3666 unsigned HOST_WIDE_INT probable_max_size;
3667
3668 dest_align = get_pointer_alignment (dest);
3669
3670 /* If DEST is not a pointer type, don't do this operation in-line. */
3671 if (dest_align == 0)
3672 return NULL_RTX;
3673
3674 if (currently_expanding_gimple_stmt)
3675 stringop_block_profile (currently_expanding_gimple_stmt,
3676 &expected_align, &expected_size);
3677
3678 if (expected_align < dest_align)
3679 expected_align = dest_align;
3680
3681 /* If the LEN parameter is zero, return DEST. */
3682 if (integer_zerop (len))
3683 {
3684 /* Evaluate and ignore VAL in case it has side-effects. */
3685 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3686 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3687 }
3688
3689 /* Stabilize the arguments in case we fail. */
3690 dest = builtin_save_expr (dest);
3691 val = builtin_save_expr (val);
3692 len = builtin_save_expr (len);
3693
3694 len_rtx = expand_normal (len);
3695 determine_block_size (len, len_rtx, &min_size, &max_size,
3696 &probable_max_size);
3697 dest_mem = get_memory_rtx (dest, len);
3698 val_mode = TYPE_MODE (unsigned_char_type_node);
3699
3700 if (TREE_CODE (val) != INTEGER_CST)
3701 {
3702 rtx val_rtx;
3703
3704 val_rtx = expand_normal (val);
3705 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3706
3707 /* Assume that we can memset by pieces if we can store
3708 the coefficients by pieces (in the required modes).
3709 We can't pass builtin_memset_gen_str as that emits RTL. */
3710 c = 1;
3711 if (tree_fits_uhwi_p (len)
3712 && can_store_by_pieces (tree_to_uhwi (len),
3713 builtin_memset_read_str, &c, dest_align,
3714 true))
3715 {
3716 val_rtx = force_reg (val_mode, val_rtx);
3717 store_by_pieces (dest_mem, tree_to_uhwi (len),
3718 builtin_memset_gen_str, val_rtx, dest_align,
3719 true, 0);
3720 }
3721 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3722 dest_align, expected_align,
3723 expected_size, min_size, max_size,
3724 probable_max_size))
3725 goto do_libcall;
3726
3727 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3728 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3729 return dest_mem;
3730 }
3731
3732 if (target_char_cast (val, &c))
3733 goto do_libcall;
3734
3735 if (c)
3736 {
3737 if (tree_fits_uhwi_p (len)
3738 && can_store_by_pieces (tree_to_uhwi (len),
3739 builtin_memset_read_str, &c, dest_align,
3740 true))
3741 store_by_pieces (dest_mem, tree_to_uhwi (len),
3742 builtin_memset_read_str, &c, dest_align, true, 0);
3743 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3744 gen_int_mode (c, val_mode),
3745 dest_align, expected_align,
3746 expected_size, min_size, max_size,
3747 probable_max_size))
3748 goto do_libcall;
3749
3750 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3751 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3752 return dest_mem;
3753 }
3754
3755 set_mem_align (dest_mem, dest_align);
3756 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3757 CALL_EXPR_TAILCALL (orig_exp)
3758 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3759 expected_align, expected_size,
3760 min_size, max_size,
3761 probable_max_size);
3762
3763 if (dest_addr == 0)
3764 {
3765 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3766 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3767 }
3768
3769 return dest_addr;
3770
3771 do_libcall:
3772 fndecl = get_callee_fndecl (orig_exp);
3773 fcode = DECL_FUNCTION_CODE (fndecl);
3774 if (fcode == BUILT_IN_MEMSET)
3775 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3776 dest, val, len);
3777 else if (fcode == BUILT_IN_BZERO)
3778 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3779 dest, len);
3780 else
3781 gcc_unreachable ();
3782 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3783 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3784 return expand_call (fn, target, target == const0_rtx);
3785 }
3786
3787 /* Expand expression EXP, which is a call to the bzero builtin. Return
3788 NULL_RTX if we failed; the caller should emit a normal call. */
3789
3790 static rtx
3791 expand_builtin_bzero (tree exp)
3792 {
3793 tree dest, size;
3794 location_t loc = EXPR_LOCATION (exp);
3795
3796 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3797 return NULL_RTX;
3798
3799 dest = CALL_EXPR_ARG (exp, 0);
3800 size = CALL_EXPR_ARG (exp, 1);
3801
3802 /* New argument list transforming bzero(ptr x, int y) to
3803 memset(ptr x, int 0, size_t y). This is done this way
3804 so that if it isn't expanded inline, we fall back to
3805 calling bzero instead of memset. */
3806
3807 return expand_builtin_memset_args (dest, integer_zero_node,
3808 fold_convert_loc (loc,
3809 size_type_node, size),
3810 const0_rtx, VOIDmode, exp);
3811 }
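
/* Editorial sketch: the argument rewrite above is the source-level
   identity below; ORIG_EXP is kept so that a failed inline expansion
   still calls bzero rather than memset.  Hypothetical, compiled out.  */
#if 0
static void
bzero_demo (void *p, __SIZE_TYPE__ n)
{
  __builtin_memset (p, 0, n); /* what __builtin_bzero (p, n) becomes.  */
}
#endif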
3812
3813 /* Expand expression EXP, which is a call to the memcmp built-in function.
3814 Return NULL_RTX if we failed and the caller should emit a normal call,
3815 otherwise try to get the result in TARGET, if convenient (and in mode
3816 MODE, if that's convenient). */
3817
3818 static rtx
3819 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3820 ATTRIBUTE_UNUSED machine_mode mode)
3821 {
3822 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3823
3824 if (!validate_arglist (exp,
3825 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3827
3828 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3829 implementing memcmp because it will stop if it encounters two
3830 zero bytes. */
3831 #if defined HAVE_cmpmemsi
3832 {
3833 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3834 rtx result;
3835 rtx insn;
3836 tree arg1 = CALL_EXPR_ARG (exp, 0);
3837 tree arg2 = CALL_EXPR_ARG (exp, 1);
3838 tree len = CALL_EXPR_ARG (exp, 2);
3839
3840 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3841 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3842 machine_mode insn_mode;
3843
3844 if (HAVE_cmpmemsi)
3845 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3846 else
3847 return NULL_RTX;
3848
3849 /* If we don't have POINTER_TYPE, call the function. */
3850 if (arg1_align == 0 || arg2_align == 0)
3851 return NULL_RTX;
3852
3853 /* Make a place to write the result of the instruction. */
3854 result = target;
3855 if (! (result != 0
3856 && REG_P (result) && GET_MODE (result) == insn_mode
3857 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3858 result = gen_reg_rtx (insn_mode);
3859
3860 arg1_rtx = get_memory_rtx (arg1, len);
3861 arg2_rtx = get_memory_rtx (arg2, len);
3862 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3863
3864 /* Set MEM_SIZE as appropriate. */
3865 if (CONST_INT_P (arg3_rtx))
3866 {
3867 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3868 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3869 }
3870
3871 if (HAVE_cmpmemsi)
3872 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3873 GEN_INT (MIN (arg1_align, arg2_align)));
3874 else
3875 gcc_unreachable ();
3876
3877 if (insn)
3878 emit_insn (insn);
3879 else
3880 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3881 TYPE_MODE (integer_type_node), 3,
3882 XEXP (arg1_rtx, 0), Pmode,
3883 XEXP (arg2_rtx, 0), Pmode,
3884 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3885 TYPE_UNSIGNED (sizetype)),
3886 TYPE_MODE (sizetype));
3887
3888 /* Return the value in the proper mode for this function. */
3889 mode = TYPE_MODE (TREE_TYPE (exp));
3890 if (GET_MODE (result) == mode)
3891 return result;
3892 else if (target != 0)
3893 {
3894 convert_move (target, result, 0);
3895 return target;
3896 }
3897 else
3898 return convert_to_mode (mode, result, 0);
3899 }
3900 #endif /* HAVE_cmpmemsi. */
3901
3902 return NULL_RTX;
3903 }
3904
3905 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3906 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3907 try to get the result in TARGET, if convenient. */
3908
3909 static rtx
3910 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3911 {
3912 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3914
3915 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3916 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3917 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3918 {
3919 rtx arg1_rtx, arg2_rtx;
3920 rtx result, insn = NULL_RTX;
3921 tree fndecl, fn;
3922 tree arg1 = CALL_EXPR_ARG (exp, 0);
3923 tree arg2 = CALL_EXPR_ARG (exp, 1);
3924
3925 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3926 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3927
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3931
3932 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935
3936 arg1_rtx = get_memory_rtx (arg1, NULL);
3937 arg2_rtx = get_memory_rtx (arg2, NULL);
3938
3939 #ifdef HAVE_cmpstrsi
3940 /* Try to call cmpstrsi. */
3941 if (HAVE_cmpstrsi)
3942 {
3943 machine_mode insn_mode
3944 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3945
3946 /* Make a place to write the result of the instruction. */
3947 result = target;
3948 if (! (result != 0
3949 && REG_P (result) && GET_MODE (result) == insn_mode
3950 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3951 result = gen_reg_rtx (insn_mode);
3952
3953 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3954 GEN_INT (MIN (arg1_align, arg2_align)));
3955 }
3956 #endif
3957 #ifdef HAVE_cmpstrnsi
3958 /* Try to determine at least one length and call cmpstrnsi. */
3959 if (!insn && HAVE_cmpstrnsi)
3960 {
3961 tree len;
3962 rtx arg3_rtx;
3963
3964 machine_mode insn_mode
3965 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3966 tree len1 = c_strlen (arg1, 1);
3967 tree len2 = c_strlen (arg2, 1);
3968
3969 if (len1)
3970 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3971 if (len2)
3972 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3973
3974 /* If we don't have a constant length for the first, use the length
3975 of the second, if we know it. We don't require a constant for
3976 this case; some cost analysis could be done if both are available
3977 but neither is constant. For now, assume they're equally cheap,
3978 unless one has side effects. If both strings have constant lengths,
3979 use the smaller. */
3980
3981 if (!len1)
3982 len = len2;
3983 else if (!len2)
3984 len = len1;
3985 else if (TREE_SIDE_EFFECTS (len1))
3986 len = len2;
3987 else if (TREE_SIDE_EFFECTS (len2))
3988 len = len1;
3989 else if (TREE_CODE (len1) != INTEGER_CST)
3990 len = len2;
3991 else if (TREE_CODE (len2) != INTEGER_CST)
3992 len = len1;
3993 else if (tree_int_cst_lt (len1, len2))
3994 len = len1;
3995 else
3996 len = len2;
3997
3998 /* If both arguments have side effects, we cannot optimize. */
3999 if (!len || TREE_SIDE_EFFECTS (len))
4000 goto do_libcall;
4001
4002 arg3_rtx = expand_normal (len);
4003
4004 /* Make a place to write the result of the instruction. */
4005 result = target;
4006 if (! (result != 0
4007 && REG_P (result) && GET_MODE (result) == insn_mode
4008 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4009 result = gen_reg_rtx (insn_mode);
4010
4011 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4012 GEN_INT (MIN (arg1_align, arg2_align)));
4013 }
4014 #endif
4015
4016 if (insn)
4017 {
4018 machine_mode mode;
4019 emit_insn (insn);
4020
4021 /* Return the value in the proper mode for this function. */
4022 mode = TYPE_MODE (TREE_TYPE (exp));
4023 if (GET_MODE (result) == mode)
4024 return result;
4025 if (target == 0)
4026 return convert_to_mode (mode, result, 0);
4027 convert_move (target, result, 0);
4028 return target;
4029 }
4030
4031 /* Expand the library call ourselves using a stabilized argument
4032 list to avoid evaluating the function's arguments twice. */
4033 #ifdef HAVE_cmpstrnsi
4034 do_libcall:
4035 #endif
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4038 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4039 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4040 return expand_call (fn, target, target == const0_rtx);
4041 }
4042 #endif
4043 return NULL_RTX;
4044 }
4045
4046 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4047 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4048 try to get the result in TARGET, if convenient. */
4049
4050 static rtx
4051 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4052 ATTRIBUTE_UNUSED machine_mode mode)
4053 {
4054 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4055
4056 if (!validate_arglist (exp,
4057 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4058 return NULL_RTX;
4059
4060 /* If c_strlen can determine an expression for one of the string
4061 lengths, and it doesn't have side effects, then emit cmpstrnsi
4062 using length MIN(strlen(string)+1, arg3). */
4063 #ifdef HAVE_cmpstrnsi
4064 if (HAVE_cmpstrnsi)
4065 {
4066 tree len, len1, len2;
4067 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4068 rtx result, insn;
4069 tree fndecl, fn;
4070 tree arg1 = CALL_EXPR_ARG (exp, 0);
4071 tree arg2 = CALL_EXPR_ARG (exp, 1);
4072 tree arg3 = CALL_EXPR_ARG (exp, 2);
4073
4074 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4075 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4076 machine_mode insn_mode
4077 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4078
4079 len1 = c_strlen (arg1, 1);
4080 len2 = c_strlen (arg2, 1);
4081
4082 if (len1)
4083 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4084 if (len2)
4085 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4086
4087 /* If we don't have a constant length for the first, use the length
4088 of the second, if we know it. We don't require a constant for
4089 this case; some cost analysis could be done if both are available
4090 but neither is constant. For now, assume they're equally cheap,
4091 unless one has side effects. If both strings have constant lengths,
4092 use the smaller. */
4093
4094 if (!len1)
4095 len = len2;
4096 else if (!len2)
4097 len = len1;
4098 else if (TREE_SIDE_EFFECTS (len1))
4099 len = len2;
4100 else if (TREE_SIDE_EFFECTS (len2))
4101 len = len1;
4102 else if (TREE_CODE (len1) != INTEGER_CST)
4103 len = len2;
4104 else if (TREE_CODE (len2) != INTEGER_CST)
4105 len = len1;
4106 else if (tree_int_cst_lt (len1, len2))
4107 len = len1;
4108 else
4109 len = len2;
4110
4111 /* If both arguments have side effects, we cannot optimize. */
4112 if (!len || TREE_SIDE_EFFECTS (len))
4113 return NULL_RTX;
4114
4115 /* The actual new length parameter is MIN(len,arg3). */
4116 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4117 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4118
4119 /* If we don't have POINTER_TYPE, call the function. */
4120 if (arg1_align == 0 || arg2_align == 0)
4121 return NULL_RTX;
4122
4123 /* Make a place to write the result of the instruction. */
4124 result = target;
4125 if (! (result != 0
4126 && REG_P (result) && GET_MODE (result) == insn_mode
4127 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4128 result = gen_reg_rtx (insn_mode);
4129
4130 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4131 arg1 = builtin_save_expr (arg1);
4132 arg2 = builtin_save_expr (arg2);
4133 len = builtin_save_expr (len);
4134
4135 arg1_rtx = get_memory_rtx (arg1, len);
4136 arg2_rtx = get_memory_rtx (arg2, len);
4137 arg3_rtx = expand_normal (len);
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4140 if (insn)
4141 {
4142 emit_insn (insn);
4143
4144 /* Return the value in the proper mode for this function. */
4145 mode = TYPE_MODE (TREE_TYPE (exp));
4146 if (GET_MODE (result) == mode)
4147 return result;
4148 if (target == 0)
4149 return convert_to_mode (mode, result, 0);
4150 convert_move (target, result, 0);
4151 return target;
4152 }
4153
4154 /* Expand the library call ourselves using a stabilized argument
4155 list to avoid evaluating the function's arguments twice. */
4156 fndecl = get_callee_fndecl (exp);
4157 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4158 arg1, arg2, len);
4159 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4160 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4161 return expand_call (fn, target, target == const0_rtx);
4162 }
4163 #endif
4164 return NULL_RTX;
4165 }
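
/* Editorial sketch: the MIN (len, arg3) bound computed above is safe
   because strncmp never inspects bytes past the first NUL of either
   string.  Hypothetical demonstration, compiled out.  */
#if 0
static int
strncmp_bound_demo (const char *s)
{
  /* strlen ("abc") + 1 == 4, so bytes beyond the fourth can never be
     compared and the length 100 may be replaced by MIN (4, 100).  */
  return __builtin_strncmp (s, "abc", 100);
}
#endif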
4166
4167 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4168 if that's convenient. */
4169
4170 rtx
4171 expand_builtin_saveregs (void)
4172 {
4173 rtx val;
4174 rtx_insn *seq;
4175
4176 /* Don't do __builtin_saveregs more than once in a function.
4177 Save the result of the first call and reuse it. */
4178 if (saveregs_value != 0)
4179 return saveregs_value;
4180
4181 /* When this function is called, it means that registers must be
4182 saved on entry to this function. So we migrate the call to the
4183 first insn of this function. */
4184
4185 start_sequence ();
4186
4187 /* Do whatever the machine needs done in this case. */
4188 val = targetm.calls.expand_builtin_saveregs ();
4189
4190 seq = get_insns ();
4191 end_sequence ();
4192
4193 saveregs_value = val;
4194
4195 /* Put the insns after the NOTE that starts the function. If this
4196 is inside a start_sequence, make the outer-level insn chain current, so
4197 the code is placed at the start of the function. */
4198 push_topmost_sequence ();
4199 emit_insn_after (seq, entry_of_function ());
4200 pop_topmost_sequence ();
4201
4202 return val;
4203 }
4204
4205 /* Expand a call to __builtin_next_arg. */
4206
4207 static rtx
4208 expand_builtin_next_arg (void)
4209 {
4210 /* Checking arguments is already done in fold_builtin_next_arg
4211 which must be called before this function. */
4212 return expand_binop (ptr_mode, add_optab,
4213 crtl->args.internal_arg_pointer,
4214 crtl->args.arg_offset_rtx,
4215 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4216 }
4217
4218 /* Make it easier for the backends by protecting the valist argument
4219 from multiple evaluations. */
4220
4221 static tree
4222 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4223 {
4224 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4225
4226 /* The current way of determining the type of valist is completely
4227 bogus. We should have the information on the va builtin instead. */
4228 if (!vatype)
4229 vatype = targetm.fn_abi_va_list (cfun->decl);
4230
4231 if (TREE_CODE (vatype) == ARRAY_TYPE)
4232 {
4233 if (TREE_SIDE_EFFECTS (valist))
4234 valist = save_expr (valist);
4235
4236 /* For this case, the backends will be expecting a pointer to
4237 vatype, but it's possible we've actually been given an array
4238 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4239 So fix it. */
4240 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4241 {
4242 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4243 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4244 }
4245 }
4246 else
4247 {
4248 tree pt = build_pointer_type (vatype);
4249
4250 if (! needs_lvalue)
4251 {
4252 if (! TREE_SIDE_EFFECTS (valist))
4253 return valist;
4254
4255 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4256 TREE_SIDE_EFFECTS (valist) = 1;
4257 }
4258
4259 if (TREE_SIDE_EFFECTS (valist))
4260 valist = save_expr (valist);
4261 valist = fold_build2_loc (loc, MEM_REF,
4262 vatype, valist, build_int_cst (pt, 0));
4263 }
4264
4265 return valist;
4266 }
4267
4268 /* The "standard" definition of va_list is void*. */
4269
4270 tree
4271 std_build_builtin_va_list (void)
4272 {
4273 return ptr_type_node;
4274 }
4275
4276 /* The "standard" abi va_list is va_list_type_node. */
4277
4278 tree
4279 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4280 {
4281 return va_list_type_node;
4282 }
4283
4284 /* The "standard" type of va_list is va_list_type_node. */
4285
4286 tree
4287 std_canonical_va_list_type (tree type)
4288 {
4289 tree wtype, htype;
4290
4291 if (INDIRECT_REF_P (type))
4292 type = TREE_TYPE (type);
4293 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4294 type = TREE_TYPE (type);
4295 wtype = va_list_type_node;
4296 htype = type;
4297 /* Treat structure va_list types. */
4298 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4299 htype = TREE_TYPE (htype);
4300 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4301 {
4302 /* If va_list is an array type, the argument may have decayed
4303 to a pointer type, e.g. by being passed to another function.
4304 In that case, unwrap both types so that we can compare the
4305 underlying records. */
4306 if (TREE_CODE (htype) == ARRAY_TYPE
4307 || POINTER_TYPE_P (htype))
4308 {
4309 wtype = TREE_TYPE (wtype);
4310 htype = TREE_TYPE (htype);
4311 }
4312 }
4313 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4314 return va_list_type_node;
4315
4316 return NULL_TREE;
4317 }
4318
4319 /* The "standard" implementation of va_start: just assign `nextarg' to
4320 the variable. */
4321
4322 void
4323 std_expand_builtin_va_start (tree valist, rtx nextarg)
4324 {
4325 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4326 convert_move (va_r, nextarg, 0);
4327 }
4328
4329 /* Expand EXP, a call to __builtin_va_start. */
4330
4331 static rtx
4332 expand_builtin_va_start (tree exp)
4333 {
4334 rtx nextarg;
4335 tree valist;
4336 location_t loc = EXPR_LOCATION (exp);
4337
4338 if (call_expr_nargs (exp) < 2)
4339 {
4340 error_at (loc, "too few arguments to function %<va_start%>");
4341 return const0_rtx;
4342 }
4343
4344 if (fold_builtin_next_arg (exp, true))
4345 return const0_rtx;
4346
4347 nextarg = expand_builtin_next_arg ();
4348 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4349
4350 if (targetm.expand_builtin_va_start)
4351 targetm.expand_builtin_va_start (valist, nextarg);
4352 else
4353 std_expand_builtin_va_start (valist, nextarg);
4354
4355 return const0_rtx;
4356 }
4357
4358 /* Expand EXP, a call to __builtin_va_end. */
4359
4360 static rtx
4361 expand_builtin_va_end (tree exp)
4362 {
4363 tree valist = CALL_EXPR_ARG (exp, 0);
4364
4365 /* Evaluate for side effects, if needed. I hate macros that don't
4366 do that. */
4367 if (TREE_SIDE_EFFECTS (valist))
4368 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4369
4370 return const0_rtx;
4371 }
4372
4373 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4374 builtin rather than just as an assignment in stdarg.h because of the
4375 nastiness of array-type va_list types. */
4376
4377 static rtx
4378 expand_builtin_va_copy (tree exp)
4379 {
4380 tree dst, src, t;
4381 location_t loc = EXPR_LOCATION (exp);
4382
4383 dst = CALL_EXPR_ARG (exp, 0);
4384 src = CALL_EXPR_ARG (exp, 1);
4385
4386 dst = stabilize_va_list_loc (loc, dst, 1);
4387 src = stabilize_va_list_loc (loc, src, 0);
4388
4389 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4390
4391 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4392 {
4393 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4396 }
4397 else
4398 {
4399 rtx dstb, srcb, size;
4400
4401 /* Evaluate to pointers. */
4402 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4404 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4405 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4406
4407 dstb = convert_memory_address (Pmode, dstb);
4408 srcb = convert_memory_address (Pmode, srcb);
4409
4410 /* "Dereference" to BLKmode memories. */
4411 dstb = gen_rtx_MEM (BLKmode, dstb);
4412 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4413 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4414 srcb = gen_rtx_MEM (BLKmode, srcb);
4415 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4416 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4417
4418 /* Copy. */
4419 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4420 }
4421
4422 return const0_rtx;
4423 }
4424
4425 /* Expand a call to one of the builtin functions __builtin_frame_address or
4426 __builtin_return_address. */
4427
4428 static rtx
4429 expand_builtin_frame_address (tree fndecl, tree exp)
4430 {
4431 /* The argument must be a nonnegative integer constant.
4432 It counts the number of frames to scan up the stack.
4433 The value is the return address saved in that frame. */
4434 if (call_expr_nargs (exp) == 0)
4435 /* Warning about missing arg was already issued. */
4436 return const0_rtx;
4437 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4438 {
4439 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4440 error ("invalid argument to %<__builtin_frame_address%>");
4441 else
4442 error ("invalid argument to %<__builtin_return_address%>");
4443 return const0_rtx;
4444 }
4445 else
4446 {
4447 rtx tem
4448 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4449 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4450
4451 /* Some ports cannot access arbitrary stack frames. */
4452 if (tem == NULL)
4453 {
4454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4455 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4456 else
4457 warning (0, "unsupported argument to %<__builtin_return_address%>");
4458 return const0_rtx;
4459 }
4460
4461 /* For __builtin_frame_address, return what we've got. */
4462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4463 return tem;
4464
4465 if (!REG_P (tem)
4466 && ! CONSTANT_P (tem))
4467 tem = copy_addr_to_reg (tem);
4468 return tem;
4469 }
4470 }
4471
4472 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4473 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4474 is the same as for allocate_dynamic_stack_space. */
4475
4476 static rtx
4477 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4478 {
4479 rtx op0;
4480 rtx result;
4481 bool valid_arglist;
4482 unsigned int align;
4483 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4484 == BUILT_IN_ALLOCA_WITH_ALIGN);
4485
4486 valid_arglist
4487 = (alloca_with_align
4488 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4489 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4490
4491 if (!valid_arglist)
4492 return NULL_RTX;
4493
4494 /* Compute the argument. */
4495 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4496
4497 /* Compute the alignment. */
4498 align = (alloca_with_align
4499 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4500 : BIGGEST_ALIGNMENT);
4501
4502 /* Allocate the desired space. */
4503 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4504 result = convert_memory_address (ptr_mode, result);
4505
4506 return result;
4507 }
4508
4509 /* Expand a call to bswap builtin in EXP.
4510 Return NULL_RTX if a normal call should be emitted rather than expanding the
4511 function in-line. If convenient, the result should be placed in TARGET.
4512 SUBTARGET may be used as the target for computing one of EXP's operands. */
4513
4514 static rtx
4515 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4516 rtx subtarget)
4517 {
4518 tree arg;
4519 rtx op0;
4520
4521 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4522 return NULL_RTX;
4523
4524 arg = CALL_EXPR_ARG (exp, 0);
4525 op0 = expand_expr (arg,
4526 subtarget && GET_MODE (subtarget) == target_mode
4527 ? subtarget : NULL_RTX,
4528 target_mode, EXPAND_NORMAL);
4529 if (GET_MODE (op0) != target_mode)
4530 op0 = convert_to_mode (target_mode, op0, 1);
4531
4532 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4533
4534 gcc_assert (target);
4535
4536 return convert_to_mode (target_mode, target, 1);
4537 }
4538
4539 /* Expand a call to a unary builtin in EXP.
4540 Return NULL_RTX if a normal call should be emitted rather than expanding the
4541 function in-line. If convenient, the result should be placed in TARGET.
4542 SUBTARGET may be used as the target for computing one of EXP's operands. */
4543
4544 static rtx
4545 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4546 rtx subtarget, optab op_optab)
4547 {
4548 rtx op0;
4549
4550 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4551 return NULL_RTX;
4552
4553 /* Compute the argument. */
4554 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4555 (subtarget
4556 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4557 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4558 VOIDmode, EXPAND_NORMAL);
4559 /* Compute op, into TARGET if possible.
4560 Set TARGET to wherever the result comes back. */
4561 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4562 op_optab, op0, target, op_optab != clrsb_optab);
4563 gcc_assert (target);
4564
4565 return convert_to_mode (target_mode, target, 0);
4566 }
4567
4568 /* Expand a call to __builtin_expect. We just return our argument
4569 as the builtin_expect semantics should already have been applied by the
4570 tree branch prediction pass. */
4571
4572 static rtx
4573 expand_builtin_expect (tree exp, rtx target)
4574 {
4575 tree arg;
4576
4577 if (call_expr_nargs (exp) < 2)
4578 return const0_rtx;
4579 arg = CALL_EXPR_ARG (exp, 0);
4580
4581 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4582 /* When guessing was done, the hints should already be stripped away. */
4583 gcc_assert (!flag_guess_branch_prob
4584 || optimize == 0 || seen_error ());
4585 return target;
4586 }
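
/* Editorial sketch: by the time this expander runs, the hint has been
   consumed by the branch prediction pass and only the value remains.
   Typical source-level use (hypothetical, compiled out).  */
#if 0
static int
expect_demo (int x)
{
  if (__builtin_expect (x == 0, 0)) /* hint: this branch is unlikely.  */
    return -1;
  return x;
}
#endif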
4587
4588 /* Expand a call to __builtin_assume_aligned. We just return our first
4589 argument as the builtin_assume_aligned semantics should already have been
4590 applied by CCP. */
4591
4592 static rtx
4593 expand_builtin_assume_aligned (tree exp, rtx target)
4594 {
4595 if (call_expr_nargs (exp) < 2)
4596 return const0_rtx;
4597 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4598 EXPAND_NORMAL);
4599 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4600 && (call_expr_nargs (exp) < 3
4601 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4602 return target;
4603 }
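
/* Editorial sketch: as with __builtin_expect, the alignment fact has
   already been consumed (by CCP), so expansion just forwards the first
   argument.  Typical use (hypothetical, compiled out).  */
#if 0
static int
assume_aligned_demo (void *p)
{
  int *q = (int *) __builtin_assume_aligned (p, 16);
  return *q; /* the optimizers may assume Q is 16-byte aligned.  */
}
#endif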
4604
4605 void
4606 expand_builtin_trap (void)
4607 {
4608 #ifdef HAVE_trap
4609 if (HAVE_trap)
4610 {
4611 rtx insn = emit_insn (gen_trap ());
4612 /* For trap insns when not accumulating outgoing args force
4613 REG_ARGS_SIZE note to prevent crossjumping of calls with
4614 different args sizes. */
4615 if (!ACCUMULATE_OUTGOING_ARGS)
4616 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4617 }
4618 else
4619 #endif
4620 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4621 emit_barrier ();
4622 }
4623
4624 /* Expand a call to __builtin_unreachable. We do nothing except emit
4625 a barrier saying that control flow will not pass here.
4626
4627 It is the responsibility of the program being compiled to ensure
4628 that control flow never reaches __builtin_unreachable. */
4629 static void
4630 expand_builtin_unreachable (void)
4631 {
4632 emit_barrier ();
4633 }
4634
4635 /* Expand EXP, a call to fabs, fabsf or fabsl.
4636 Return NULL_RTX if a normal call should be emitted rather than expanding
4637 the function inline. If convenient, the result should be placed
4638 in TARGET. SUBTARGET may be used as the target for computing
4639 the operand. */
4640
4641 static rtx
4642 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4643 {
4644 machine_mode mode;
4645 tree arg;
4646 rtx op0;
4647
4648 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4649 return NULL_RTX;
4650
4651 arg = CALL_EXPR_ARG (exp, 0);
4652 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4653 mode = TYPE_MODE (TREE_TYPE (arg));
4654 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4655 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4656 }
4657
4658 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4659 Return NULL if a normal call should be emitted rather than expanding the
4660 function inline. If convenient, the result should be placed in TARGET.
4661 SUBTARGET may be used as the target for computing the operand. */
4662
4663 static rtx
4664 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4665 {
4666 rtx op0, op1;
4667 tree arg;
4668
4669 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4670 return NULL_RTX;
4671
4672 arg = CALL_EXPR_ARG (exp, 0);
4673 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4674
4675 arg = CALL_EXPR_ARG (exp, 1);
4676 op1 = expand_normal (arg);
4677
4678 return expand_copysign (op0, op1, target);
4679 }
4680
4681 /* Expand a call to __builtin___clear_cache. */
4682
4683 static rtx
4684 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4685 {
4686 #ifndef HAVE_clear_cache
4687 #ifdef CLEAR_INSN_CACHE
4688 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4689 does something. Just do the default expansion to a call to
4690 __clear_cache(). */
4691 return NULL_RTX;
4692 #else
4693 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4694 does nothing. There is no need to call it. Do nothing. */
4695 return const0_rtx;
4696 #endif /* CLEAR_INSN_CACHE */
4697 #else
4698 /* We have a "clear_cache" insn, and it will handle everything. */
4699 tree begin, end;
4700 rtx begin_rtx, end_rtx;
4701
4702 /* We must not expand to a library call. If we did, any
4703 fallback library function in libgcc that might contain a call to
4704 __builtin___clear_cache() would recurse infinitely. */
4705 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4706 {
4707 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4708 return const0_rtx;
4709 }
4710
4711 if (HAVE_clear_cache)
4712 {
4713 struct expand_operand ops[2];
4714
4715 begin = CALL_EXPR_ARG (exp, 0);
4716 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4717
4718 end = CALL_EXPR_ARG (exp, 1);
4719 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4720
4721 create_address_operand (&ops[0], begin_rtx);
4722 create_address_operand (&ops[1], end_rtx);
4723 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4724 return const0_rtx;
4725 }
4726 return const0_rtx;
4727 #endif /* HAVE_clear_cache */
4728 }
4729
4730 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4731
4732 static rtx
4733 round_trampoline_addr (rtx tramp)
4734 {
4735 rtx temp, addend, mask;
4736
4737 /* If we don't need too much alignment, we'll have been guaranteed
4738 proper alignment by get_trampoline_type. */
4739 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4740 return tramp;
4741
4742 /* Round address up to desired boundary. */
4743 temp = gen_reg_rtx (Pmode);
4744 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4745 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4746
4747 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4748 temp, 0, OPTAB_LIB_WIDEN);
4749 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4750 temp, 0, OPTAB_LIB_WIDEN);
4751
4752 return tramp;
4753 }
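
/* Editorial sketch: the PLUS and AND emitted above compute the classic
   power-of-two round-up, as in the hypothetical, compiled-out helper
   below.  */
#if 0
static __UINTPTR_TYPE__
round_up_demo (__UINTPTR_TYPE__ tramp, __UINTPTR_TYPE__ align)
{
  /* ALIGN must be a power of two, as TRAMPOLINE_ALIGNMENT is.  */
  return (tramp + align - 1) & -align;
}
#endif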
4754
4755 static rtx
4756 expand_builtin_init_trampoline (tree exp, bool onstack)
4757 {
4758 tree t_tramp, t_func, t_chain;
4759 rtx m_tramp, r_tramp, r_chain, tmp;
4760
4761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4762 POINTER_TYPE, VOID_TYPE))
4763 return NULL_RTX;
4764
4765 t_tramp = CALL_EXPR_ARG (exp, 0);
4766 t_func = CALL_EXPR_ARG (exp, 1);
4767 t_chain = CALL_EXPR_ARG (exp, 2);
4768
4769 r_tramp = expand_normal (t_tramp);
4770 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4771 MEM_NOTRAP_P (m_tramp) = 1;
4772
4773 /* If ONSTACK, the TRAMP argument should be the address of a field
4774 within the local function's FRAME decl. Either way, let's see if
4775 we can fill in the MEM_ATTRs for this memory. */
4776 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4777 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4778
4779 /* Creator of a heap trampoline is responsible for making sure the
4780 address is aligned to at least STACK_BOUNDARY. Normally malloc
4781 will ensure this anyhow. */
4782 tmp = round_trampoline_addr (r_tramp);
4783 if (tmp != r_tramp)
4784 {
4785 m_tramp = change_address (m_tramp, BLKmode, tmp);
4786 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4787 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4788 }
4789
4790 /* The FUNC argument should be the address of the nested function.
4791 Extract the actual function decl to pass to the hook. */
4792 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4793 t_func = TREE_OPERAND (t_func, 0);
4794 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4795
4796 r_chain = expand_normal (t_chain);
4797
4798 /* Generate insns to initialize the trampoline. */
4799 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4800
4801 if (onstack)
4802 {
4803 trampolines_created = 1;
4804
4805 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4806 "trampoline generated for nested function %qD", t_func);
4807 }
4808
4809 return const0_rtx;
4810 }
4811
4812 static rtx
4813 expand_builtin_adjust_trampoline (tree exp)
4814 {
4815 rtx tramp;
4816
4817 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4818 return NULL_RTX;
4819
4820 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4821 tramp = round_trampoline_addr (tramp);
4822 if (targetm.calls.trampoline_adjust_address)
4823 tramp = targetm.calls.trampoline_adjust_address (tramp);
4824
4825 return tramp;
4826 }
4827
4828 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4829 function. The function first checks whether the back end provides
4830 an insn to implement signbit for the respective mode. If not, it
4831 checks whether the floating point format of the value is such that
4832 the sign bit can be extracted. If that is not the case, the
4833 function returns NULL_RTX to indicate that a normal call should be
4834 emitted rather than expanding the function in-line. EXP is the
4835 expression that is a call to the builtin function; if convenient,
4836 the result should be placed in TARGET. */
4837 static rtx
4838 expand_builtin_signbit (tree exp, rtx target)
4839 {
4840 const struct real_format *fmt;
4841 machine_mode fmode, imode, rmode;
4842 tree arg;
4843 int word, bitpos;
4844 enum insn_code icode;
4845 rtx temp;
4846 location_t loc = EXPR_LOCATION (exp);
4847
4848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4849 return NULL_RTX;
4850
4851 arg = CALL_EXPR_ARG (exp, 0);
4852 fmode = TYPE_MODE (TREE_TYPE (arg));
4853 rmode = TYPE_MODE (TREE_TYPE (exp));
4854 fmt = REAL_MODE_FORMAT (fmode);
4855
4856 arg = builtin_save_expr (arg);
4857
4858 /* Expand the argument yielding a RTX expression. */
4859 temp = expand_normal (arg);
4860
4861 /* Check if the back end provides an insn that handles signbit for the
4862 argument's mode. */
4863 icode = optab_handler (signbit_optab, fmode);
4864 if (icode != CODE_FOR_nothing)
4865 {
4866 rtx_insn *last = get_last_insn ();
4867 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4868 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4869 return target;
4870 delete_insns_since (last);
4871 }
4872
4873 /* For floating point formats without a sign bit, implement signbit
4874 as "ARG < 0.0". */
4875 bitpos = fmt->signbit_ro;
4876 if (bitpos < 0)
4877 {
4878 /* But we can't do this if the format supports signed zero. */
4879 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4880 return NULL_RTX;
4881
4882 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4883 build_real (TREE_TYPE (arg), dconst0));
4884 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4885 }
4886
4887 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4888 {
4889 imode = int_mode_for_mode (fmode);
4890 if (imode == BLKmode)
4891 return NULL_RTX;
4892 temp = gen_lowpart (imode, temp);
4893 }
4894 else
4895 {
4896 imode = word_mode;
4897 /* Handle targets with different FP word orders. */
4898 if (FLOAT_WORDS_BIG_ENDIAN)
4899 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4900 else
4901 word = bitpos / BITS_PER_WORD;
4902 temp = operand_subword_force (temp, word, fmode);
4903 bitpos = bitpos % BITS_PER_WORD;
4904 }
4905
4906 /* Force the intermediate word_mode (or narrower) result into a
4907 register. This avoids attempting to create paradoxical SUBREGs
4908 of floating point modes below. */
4909 temp = force_reg (imode, temp);
4910
4911 /* If the bitpos is within the "result mode" lowpart, the operation
4912 can be implemented with a single bitwise AND. Otherwise, we need
4913 a right shift and an AND. */
4914
4915 if (bitpos < GET_MODE_BITSIZE (rmode))
4916 {
4917 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4918
4919 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4920 temp = gen_lowpart (rmode, temp);
4921 temp = expand_binop (rmode, and_optab, temp,
4922 immed_wide_int_const (mask, rmode),
4923 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4924 }
4925 else
4926 {
4927 /* Perform a logical right shift to place the signbit in the least
4928 significant bit, then truncate the result to the desired mode
4929 and mask just this bit. */
4930 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4931 temp = gen_lowpart (rmode, temp);
4932 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4933 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4934 }
4935
4936 return temp;
4937 }
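
/* Editorial sketch: for IEEE single precision (signbit_ro == 31) the
   fallback path above behaves like the bit test below, assuming 32-bit
   float and unsigned int (hypothetical, compiled out).  */
#if 0
static int
signbit_demo (float f)
{
  unsigned int bits;
  __builtin_memcpy (&bits, &f, sizeof bits); /* reinterpret, no conversion.  */
  return (bits >> 31) & 1;                   /* the shift-and-AND variant.  */
}
#endif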
4938
4939 /* Expand fork or exec calls. TARGET is the desired target of the
4940 call. EXP is the call. FN is the
4941 identifier of the actual function. IGNORE is nonzero if the
4942 value is to be ignored. */
4943
4944 static rtx
4945 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4946 {
4947 tree id, decl;
4948 tree call;
4949
4950 /* If we are not profiling, just call the function. */
4951 if (!profile_arc_flag)
4952 return NULL_RTX;
4953
4954 /* Otherwise call the wrapper. This should be equivalent for the rest of
4955 the compiler, so the code does not diverge, and the wrapper may run the
4956 code necessary for keeping the profiling sane. */
4957
4958 switch (DECL_FUNCTION_CODE (fn))
4959 {
4960 case BUILT_IN_FORK:
4961 id = get_identifier ("__gcov_fork");
4962 break;
4963
4964 case BUILT_IN_EXECL:
4965 id = get_identifier ("__gcov_execl");
4966 break;
4967
4968 case BUILT_IN_EXECV:
4969 id = get_identifier ("__gcov_execv");
4970 break;
4971
4972 case BUILT_IN_EXECLP:
4973 id = get_identifier ("__gcov_execlp");
4974 break;
4975
4976 case BUILT_IN_EXECLE:
4977 id = get_identifier ("__gcov_execle");
4978 break;
4979
4980 case BUILT_IN_EXECVP:
4981 id = get_identifier ("__gcov_execvp");
4982 break;
4983
4984 case BUILT_IN_EXECVE:
4985 id = get_identifier ("__gcov_execve");
4986 break;
4987
4988 default:
4989 gcc_unreachable ();
4990 }
4991
4992 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4993 FUNCTION_DECL, id, TREE_TYPE (fn));
4994 DECL_EXTERNAL (decl) = 1;
4995 TREE_PUBLIC (decl) = 1;
4996 DECL_ARTIFICIAL (decl) = 1;
4997 TREE_NOTHROW (decl) = 1;
4998 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4999 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5000 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5001 return expand_call (call, target, ignore);
5002 }
5003
5004
5005 \f
5006 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5007 the pointer in these functions is void*, the tree optimizers may remove
5008 casts. The mode computed in expand_builtin isn't reliable either, due
5009 to __sync_bool_compare_and_swap.
5010
5011 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5012 group of builtins. This gives us log2 of the mode size. */
5013
5014 static inline machine_mode
5015 get_builtin_sync_mode (int fcode_diff)
5016 {
5017 /* The size is not negotiable, so ask not to get BLKmode in return
5018 if the target indicates that a smaller size would be better. */
5019 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5020 }
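
/* Editorial sketch: FCODE_DIFF of 0, 1, 2, 3, 4 selects the _1, _2, _4,
   _8, _16 variants respectively, i.e. an operand size of 1 << FCODE_DIFF
   bytes (hypothetical, compiled out).  */
#if 0
static unsigned int
sync_size_demo (int fcode_diff)
{
  /* E.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
     == 2, giving 1 << 2 == 4 bytes.  */
  return 1u << fcode_diff;
}
#endif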
5021
5022 /* Expand the memory expression LOC and return the appropriate memory operand
5023 for the builtin_sync operations. */
5024
5025 static rtx
5026 get_builtin_sync_mem (tree loc, machine_mode mode)
5027 {
5028 rtx addr, mem;
5029
5030 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5031 addr = convert_memory_address (Pmode, addr);
5032
5033 /* Note that we explicitly do not want any alias information for this
5034 memory, so that we kill all other live memories. Otherwise we don't
5035 satisfy the full barrier semantics of the intrinsic. */
5036 mem = validize_mem (gen_rtx_MEM (mode, addr));
5037
5038 /* The memory must be at least as aligned as the mode requires. */
5039 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5040 get_pointer_alignment (loc)));
5041 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5042 MEM_VOLATILE_P (mem) = 1;
5043
5044 return mem;
5045 }
5046
5047 /* Make sure an argument is in the right mode.
5048 EXP is the tree argument.
5049 MODE is the mode it should be in. */
5050
5051 static rtx
5052 expand_expr_force_mode (tree exp, machine_mode mode)
5053 {
5054 rtx val;
5055 machine_mode old_mode;
5056
5057 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5058 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5059 of CONST_INTs, where we know the old_mode only from the call argument. */
5060
5061 old_mode = GET_MODE (val);
5062 if (old_mode == VOIDmode)
5063 old_mode = TYPE_MODE (TREE_TYPE (exp));
5064 val = convert_modes (mode, old_mode, val, 1);
5065 return val;
5066 }
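
/* Example (illustrative): in __sync_fetch_and_add_2 (p, 1) the constant 1
   expands to a CONST_INT, which carries VOIDmode; OLD_MODE is then taken
   from the argument's type (HImode here) before convert_modes produces an
   operand of the required mode.  */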
5067
5068
5069 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5070 EXP is the CALL_EXPR. CODE is the rtx code
5071 that corresponds to the arithmetic or logical operation from the name;
5072 an exception here is that NOT actually means NAND. TARGET is an optional
5073 place for us to store the results; AFTER is true if this is the
5074 fetch_and_xxx form. */
5075
5076 static rtx
5077 expand_builtin_sync_operation (machine_mode mode, tree exp,
5078 enum rtx_code code, bool after,
5079 rtx target)
5080 {
5081 rtx val, mem;
5082 location_t loc = EXPR_LOCATION (exp);
5083
5084 if (code == NOT && warn_sync_nand)
5085 {
5086 tree fndecl = get_callee_fndecl (exp);
5087 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5088
5089 static bool warned_f_a_n, warned_n_a_f;
5090
5091 switch (fcode)
5092 {
5093 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5094 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5095 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5096 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5097 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5098 if (warned_f_a_n)
5099 break;
5100
5101 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5102 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5103 warned_f_a_n = true;
5104 break;
5105
5106 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5107 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5108 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5109 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5110 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5111 if (warned_n_a_f)
5112 break;
5113
5114 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5115 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5116 warned_n_a_f = true;
5117 break;
5118
5119 default:
5120 gcc_unreachable ();
5121 }
5122 }
5123
5124 /* Expand the operands. */
5125 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5126 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5127
5128 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5129 after);
5130 }
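
/* Semantics reminder (illustrative, restating the GCC 4.4 change that the
   warning above refers to):

       __sync_fetch_and_nand (p, v):  tmp = *p; *p = ~(tmp & v); return tmp;
       __sync_nand_and_fetch (p, v):  *p = ~(*p & v); return *p;

   Before GCC 4.4 the value stored was ~*p & v instead.  */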
5131
5132 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5133 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5134 true if this is the boolean form. TARGET is a place for us to store the
5135 results; this is NOT optional if IS_BOOL is true. */
5136
5137 static rtx
5138 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5139 bool is_bool, rtx target)
5140 {
5141 rtx old_val, new_val, mem;
5142 rtx *pbool, *poval;
5143
5144 /* Expand the operands. */
5145 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5146 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5147 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5148
5149 pbool = poval = NULL;
5150 if (target != const0_rtx)
5151 {
5152 if (is_bool)
5153 pbool = &target;
5154 else
5155 poval = &target;
5156 }
5157 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5158 false, MEMMODEL_SEQ_CST,
5159 MEMMODEL_SEQ_CST))
5160 return NULL_RTX;
5161
5162 return target;
5163 }
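
/* Mapping sketch (illustrative):

       __sync_bool_compare_and_swap (p, o, n) -> flag: did *p equal O?
       __sync_val_compare_and_swap (p, o, n)  -> the prior contents of *p

   In both forms N is stored only if the prior contents equalled O, and
   full MEMMODEL_SEQ_CST semantics apply whether or not the swap happens.  */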
5164
5165 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5166 general form is actually an atomic exchange, and some targets only
5167 support a reduced form with the second argument being a constant 1.
5168 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5169 the results. */
5170
5171 static rtx
5172 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5173 rtx target)
5174 {
5175 rtx val, mem;
5176
5177 /* Expand the operands. */
5178 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5179 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5180
5181 return expand_sync_lock_test_and_set (target, mem, val);
5182 }
5183
5184 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5185
5186 static void
5187 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5188 {
5189 rtx mem;
5190
5191 /* Expand the operands. */
5192 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5193
5194 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5195 }
5196
5197 /* Given an integer representing an ``enum memmodel'', verify its
5198 correctness and return the memory model enum. */
5199
5200 static enum memmodel
5201 get_memmodel (tree exp)
5202 {
5203 rtx op;
5204 unsigned HOST_WIDE_INT val;
5205
5206 /* If the parameter is not a constant, it's a run-time value, so we'll just
5207 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5208 if (TREE_CODE (exp) != INTEGER_CST)
5209 return MEMMODEL_SEQ_CST;
5210
5211 op = expand_normal (exp);
5212
5213 val = INTVAL (op);
5214 if (targetm.memmodel_check)
5215 val = targetm.memmodel_check (val);
5216 else if (val & ~MEMMODEL_MASK)
5217 {
5218 warning (OPT_Winvalid_memory_model,
5219 "Unknown architecture specifier in memory model to builtin.");
5220 return MEMMODEL_SEQ_CST;
5221 }
5222
5223 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5224 {
5225 warning (OPT_Winvalid_memory_model,
5226 "invalid memory model argument to builtin");
5227 return MEMMODEL_SEQ_CST;
5228 }
5229
5230 return (enum memmodel) val;
5231 }
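
/* For reference (values assumed from the enum memmodel definition, which
   mirrors the __ATOMIC_* constants emitted by the front ends):
   MEMMODEL_RELAXED == 0, MEMMODEL_CONSUME == 1, MEMMODEL_ACQUIRE == 2,
   MEMMODEL_RELEASE == 3, MEMMODEL_ACQ_REL == 4, MEMMODEL_SEQ_CST == 5.
   Bits above MEMMODEL_MASK carry target hints (e.g. the x86 HLE bits),
   which targetm.memmodel_check is expected to validate.  */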
5232
5233 /* Expand the __atomic_exchange intrinsic:
5234 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5235 EXP is the CALL_EXPR.
5236 TARGET is an optional place for us to store the results. */
5237
5238 static rtx
5239 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5240 {
5241 rtx val, mem;
5242 enum memmodel model;
5243
5244 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5245 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5246 {
5247 error ("invalid memory model for %<__atomic_exchange%>");
5248 return NULL_RTX;
5249 }
5250
5251 if (!flag_inline_atomics)
5252 return NULL_RTX;
5253
5254 /* Expand the operands. */
5255 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5256 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5257
5258 return expand_atomic_exchange (target, mem, val, model);
5259 }
5260
5261 /* Expand the __atomic_compare_exchange intrinsic:
5262 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5263 TYPE desired, BOOL weak,
5264 enum memmodel success,
5265 enum memmodel failure)
5266 EXP is the CALL_EXPR.
5267 TARGET is an optional place for us to store the results. */
5268
5269 static rtx
5270 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5271 rtx target)
5272 {
5273 rtx expect, desired, mem, oldval;
5274 rtx_code_label *label;
5275 enum memmodel success, failure;
5276 tree weak;
5277 bool is_weak;
5278
5279 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5280 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5281
5282 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5283 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5284 {
5285 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5286 return NULL_RTX;
5287 }
5288
5289 if (failure > success)
5290 {
5291 error ("failure memory model cannot be stronger than success "
5292 "memory model for %<__atomic_compare_exchange%>");
5293 return NULL_RTX;
5294 }
5295
5296 if (!flag_inline_atomics)
5297 return NULL_RTX;
5298
5299 /* Expand the operands. */
5300 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5301
5302 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5303 expect = convert_memory_address (Pmode, expect);
5304 expect = gen_rtx_MEM (mode, expect);
5305 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5306
5307 weak = CALL_EXPR_ARG (exp, 3);
5308 is_weak = false;
5309 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5310 is_weak = true;
5311
5312 if (target == const0_rtx)
5313 target = NULL;
5314
5315 /* Lest the rtl backend create a race condition with an improper store
5316 to memory, always create a new pseudo for OLDVAL. */
5317 oldval = NULL;
5318
5319 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5320 is_weak, success, failure))
5321 return NULL_RTX;
5322
5323 /* Conditionally store back to EXPECT, lest we create a race condition
5324 with an improper store to memory. */
5325 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5326 the normal case where EXPECT is totally private, i.e. a register. At
5327 which point the store can be unconditional. */
5328 label = gen_label_rtx ();
5329 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5330 emit_move_insn (expect, oldval);
5331 emit_label (label);
5332
5333 return target;
5334 }
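
/* Usage sketch (illustrative): the canonical retry loop built on this
   builtin.  On failure the observed value is stored back into EXPECTED,
   which is exactly the conditional copy of OLDVAL emitted above:

       int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                            false, __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST))
         ;
*/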
5335
5336 /* Expand the __atomic_load intrinsic:
5337 TYPE __atomic_load (TYPE *object, enum memmodel)
5338 EXP is the CALL_EXPR.
5339 TARGET is an optional place for us to store the results. */
5340
5341 static rtx
5342 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5343 {
5344 rtx mem;
5345 enum memmodel model;
5346
5347 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5348 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5349 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5350 {
5351 error ("invalid memory model for %<__atomic_load%>");
5352 return NULL_RTX;
5353 }
5354
5355 if (!flag_inline_atomics)
5356 return NULL_RTX;
5357
5358 /* Expand the operand. */
5359 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5360
5361 return expand_atomic_load (target, mem, model);
5362 }
5363
5364
5365 /* Expand the __atomic_store intrinsic:
5366 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5369
5370 static rtx
5371 expand_builtin_atomic_store (machine_mode mode, tree exp)
5372 {
5373 rtx mem, val;
5374 enum memmodel model;
5375
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5377 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5378 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5379 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5380 {
5381 error ("invalid memory model for %<__atomic_store%>");
5382 return NULL_RTX;
5383 }
5384
5385 if (!flag_inline_atomics)
5386 return NULL_RTX;
5387
5388 /* Expand the operands. */
5389 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5390 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5391
5392 return expand_atomic_store (mem, val, model, false);
5393 }
5394
5395 /* Expand the __atomic_fetch_XXX intrinsic:
5396 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5397 EXP is the CALL_EXPR.
5398 TARGET is an optional place for us to store the results.
5399 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5400 FETCH_AFTER is true if returning the result of the operation.
5401 FETCH_AFTER is false if returning the value before the operation.
5402 IGNORE is true if the result is not used.
5403 EXT_CALL is the correct builtin for an external call if this cannot be
5404 resolved to an instruction sequence. */
5405
5406 static rtx
5407 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5408 enum rtx_code code, bool fetch_after,
5409 bool ignore, enum built_in_function ext_call)
5410 {
5411 rtx val, mem, ret;
5412 enum memmodel model;
5413 tree fndecl;
5414 tree addr;
5415
5416 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5417
5418 /* Expand the operands. */
5419 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5420 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5421
5422 /* Only try generating instructions if inlining is turned on. */
5423 if (flag_inline_atomics)
5424 {
5425 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5426 if (ret)
5427 return ret;
5428 }
5429
5430 /* If there is no substitute routine, let the normal library call happen. */
5431 if (ext_call == BUILT_IN_NONE)
5432 return NULL_RTX;
5433
5434 /* Change the call to the specified function. */
5435 fndecl = get_callee_fndecl (exp);
5436 addr = CALL_EXPR_FN (exp);
5437 STRIP_NOPS (addr);
5438
5439 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5440 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5441
5442 /* Expand the call here so we can emit trailing code. */
5443 ret = expand_call (exp, target, ignore);
5444
5445 /* Replace the original function just in case it matters. */
5446 TREE_OPERAND (addr, 0) = fndecl;
5447
5448 /* Then issue the arithmetic correction to return the right result. */
5449 if (!ignore)
5450 {
5451 if (code == NOT)
5452 {
5453 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5454 OPTAB_LIB_WIDEN);
5455 ret = expand_simple_unop (mode, NOT, ret, target, true);
5456 }
5457 else
5458 ret = expand_simple_binop (mode, code, ret, val, target, true,
5459 OPTAB_LIB_WIDEN);
5460 }
5461 return ret;
5462 }
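
/* Fix-up example (illustrative): if __atomic_add_fetch_4 cannot be
   inlined, the call is redirected to __atomic_fetch_add_4, which returns
   the old value; the code above then adds VAL once more to recover the
   new value.  For NAND the correction is RET = ~(RET & VAL), matching the
   AND/NOT pair emitted above.  */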
5463
5464
5465 #ifndef HAVE_atomic_clear
5466 # define HAVE_atomic_clear 0
5467 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5468 #endif
5469
5470 /* Expand an atomic clear operation.
5471 void __atomic_clear (BOOL *obj, enum memmodel)
5472 EXP is the call expression. */
5473
5474 static rtx
5475 expand_builtin_atomic_clear (tree exp)
5476 {
5477 machine_mode mode;
5478 rtx mem, ret;
5479 enum memmodel model;
5480
5481 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5482 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5483 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5484
5485 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5486 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5487 {
5488 error ("invalid memory model for %<__atomic_store%>");
5489 return const0_rtx;
5490 }
5491
5492 if (HAVE_atomic_clear)
5493 {
5494 emit_insn (gen_atomic_clear (mem, model));
5495 return const0_rtx;
5496 }
5497
5498 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5499 Failing both, issue a plain store. The only way this can
5500 fail is if the bool type is larger than a word size. Unlikely, but
5501 handle it anyway for completeness. Assume a single-threaded model since
5502 there is no atomic support in this case, and no barriers are required. */
5503 ret = expand_atomic_store (mem, const0_rtx, model, true);
5504 if (!ret)
5505 emit_move_insn (mem, const0_rtx);
5506 return const0_rtx;
5507 }
5508
5509 /* Expand an atomic test_and_set operation.
5510 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5511 EXP is the call expression. */
5512
5513 static rtx
5514 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5515 {
5516 rtx mem;
5517 enum memmodel model;
5518 machine_mode mode;
5519
5520 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5521 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5522 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5523
5524 return expand_atomic_test_and_set (target, mem, model);
5525 }
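
/* Usage sketch (illustrative): a minimal spin lock built from this pair
   of builtins, assuming a byte-sized flag:

       static char lock;
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;
       ... critical section ...
       __atomic_clear (&lock, __ATOMIC_RELEASE);
*/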
5526
5527
5528 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5529 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5530
5531 static tree
5532 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5533 {
5534 int size;
5535 machine_mode mode;
5536 unsigned int mode_align, type_align;
5537
5538 if (TREE_CODE (arg0) != INTEGER_CST)
5539 return NULL_TREE;
5540
5541 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5542 mode = mode_for_size (size, MODE_INT, 0);
5543 mode_align = GET_MODE_ALIGNMENT (mode);
5544
5545 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5546 type_align = mode_align;
5547 else
5548 {
5549 tree ttype = TREE_TYPE (arg1);
5550
5551 /* This function is usually invoked and folded immediately by the front
5552 end before anything else has a chance to look at it. The pointer
5553 parameter at this point is usually cast to a void *, so check for that
5554 and look past the cast. */
5555 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5556 && VOID_TYPE_P (TREE_TYPE (ttype)))
5557 arg1 = TREE_OPERAND (arg1, 0);
5558
5559 ttype = TREE_TYPE (arg1);
5560 gcc_assert (POINTER_TYPE_P (ttype));
5561
5562 /* Get the underlying type of the object. */
5563 ttype = TREE_TYPE (ttype);
5564 type_align = TYPE_ALIGN (ttype);
5565 }
5566
5567 /* If the object has smaller alignment, the lock free routines cannot
5568 be used. */
5569 if (type_align < mode_align)
5570 return boolean_false_node;
5571
5572 /* Check if a compare_and_swap pattern exists for the mode which represents
5573 the required size. The pattern is not allowed to fail, so the existence
5574 of the pattern indicates support is present. */
5575 if (can_compare_and_swap_p (mode, true))
5576 return boolean_true_node;
5577 else
5578 return boolean_false_node;
5579 }
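
/* Folding example (illustrative): on a target that provides a 4-byte
   compare-and-swap pattern, __atomic_always_lock_free (4, 0) folds to
   true; passing instead a pointer to an object whose type alignment is
   below GET_MODE_ALIGNMENT of the corresponding integer mode makes the
   fold return false, since the lock-free routines assume mode alignment.  */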
5580
5581 /* Return true if the parameters to call EXP represent an object which will
5582 always generate lock free instructions. The first argument represents the
5583 size of the object, and the second parameter is a pointer to the object
5584 itself. If NULL is passed for the object, then the result is based on
5585 typical alignment for an object of the specified size. Otherwise return
5586 false. */
5587
5588 static rtx
5589 expand_builtin_atomic_always_lock_free (tree exp)
5590 {
5591 tree size;
5592 tree arg0 = CALL_EXPR_ARG (exp, 0);
5593 tree arg1 = CALL_EXPR_ARG (exp, 1);
5594
5595 if (TREE_CODE (arg0) != INTEGER_CST)
5596 {
5597 error ("non-constant argument 1 to __atomic_always_lock_free");
5598 return const0_rtx;
5599 }
5600
5601 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5602 if (size == boolean_true_node)
5603 return const1_rtx;
5604 return const0_rtx;
5605 }
5606
5607 /* Return boolean_true_node if it can be determined that object ARG1 of size
5608 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5609
5610 static tree
5611 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5612 {
5613 if (!flag_inline_atomics)
5614 return NULL_TREE;
5615
5616 /* If it isn't always lock free, don't generate a result. */
5617 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5618 return boolean_true_node;
5619
5620 return NULL_TREE;
5621 }
5622
5623 /* Return true if the parameters to call EXP represent an object which will
5624 always generate lock free instructions. The first argument represents the
5625 size of the object, and the second parameter is a pointer to the object
5626 itself. If NULL is passed for the object, then the result is based on
5627 typical alignment for an object of the specified size. Otherwise return
5628 NULL_RTX. */
5629
5630 static rtx
5631 expand_builtin_atomic_is_lock_free (tree exp)
5632 {
5633 tree size;
5634 tree arg0 = CALL_EXPR_ARG (exp, 0);
5635 tree arg1 = CALL_EXPR_ARG (exp, 1);
5636
5637 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5638 {
5639 error ("non-integer argument 1 to __atomic_is_lock_free");
5640 return NULL_RTX;
5641 }
5642
5643 if (!flag_inline_atomics)
5644 return NULL_RTX;
5645
5646 /* If the value is known at compile time, return the RTX for it. */
5647 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5648 if (size == boolean_true_node)
5649 return const1_rtx;
5650
5651 return NULL_RTX;
5652 }
5653
5654 /* Expand the __atomic_thread_fence intrinsic:
5655 void __atomic_thread_fence (enum memmodel)
5656 EXP is the CALL_EXPR. */
5657
5658 static void
5659 expand_builtin_atomic_thread_fence (tree exp)
5660 {
5661 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5662 expand_mem_thread_fence (model);
5663 }
5664
5665 /* Expand the __atomic_signal_fence intrinsic:
5666 void __atomic_signal_fence (enum memmodel)
5667 EXP is the CALL_EXPR. */
5668
5669 static void
5670 expand_builtin_atomic_signal_fence (tree exp)
5671 {
5672 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5673 expand_mem_signal_fence (model);
5674 }
5675
5676 /* Expand the __sync_synchronize intrinsic. */
5677
5678 static void
5679 expand_builtin_sync_synchronize (void)
5680 {
5681 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5682 }
5683
5684 static rtx
5685 expand_builtin_thread_pointer (tree exp, rtx target)
5686 {
5687 enum insn_code icode;
5688 if (!validate_arglist (exp, VOID_TYPE))
5689 return const0_rtx;
5690 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5691 if (icode != CODE_FOR_nothing)
5692 {
5693 struct expand_operand op;
5694 /* If the target is not suitable, create a new one. */
5695 if (target == NULL_RTX
5696 || !REG_P (target)
5697 || GET_MODE (target) != Pmode)
5698 target = gen_reg_rtx (Pmode);
5699 create_output_operand (&op, target, Pmode);
5700 expand_insn (icode, 1, &op);
5701 return target;
5702 }
5703 error ("__builtin_thread_pointer is not supported on this target");
5704 return const0_rtx;
5705 }
5706
5707 static void
5708 expand_builtin_set_thread_pointer (tree exp)
5709 {
5710 enum insn_code icode;
5711 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5712 return;
5713 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5714 if (icode != CODE_FOR_nothing)
5715 {
5716 struct expand_operand op;
5717 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5718 Pmode, EXPAND_NORMAL);
5719 create_input_operand (&op, val, Pmode);
5720 expand_insn (icode, 1, &op);
5721 return;
5722 }
5723 error ("__builtin_set_thread_pointer is not supported on this target");
5724 }
5725
5726 \f
5727 /* Emit code to restore the current value of the stack pointer. */
5728
5729 static void
5730 expand_stack_restore (tree var)
5731 {
5732 rtx_insn *prev;
5733 rtx sa = expand_normal (var);
5734
5735 sa = convert_memory_address (Pmode, sa);
5736
5737 prev = get_last_insn ();
5738 emit_stack_restore (SAVE_BLOCK, sa);
5739 fixup_args_size_notes (prev, get_last_insn (), 0);
5740 }
5741
5742
5743 /* Emit code to save the current value of the stack pointer. */
5744
5745 static rtx
5746 expand_stack_save (void)
5747 {
5748 rtx ret = NULL_RTX;
5749
5750 do_pending_stack_adjust ();
5751 emit_stack_save (SAVE_BLOCK, &ret);
5752 return ret;
5753 }
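
/* Pairing sketch (illustrative): the middle end brackets the lifetime of
   a variable-sized object roughly as

       void *sp = __builtin_stack_save ();
       ... use the variable-sized storage ...
       __builtin_stack_restore (sp);

   which is why expand_stack_restore above fixes up REG_ARGS_SIZE notes on
   the insns emitted for the restore.  */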
5754
5755 /* Expand an expression EXP that calls a built-in function,
5756 with result going to TARGET if that's convenient
5757 (and in mode MODE if that's convenient).
5758 SUBTARGET may be used as the target for computing one of EXP's operands.
5759 IGNORE is nonzero if the value is to be ignored. */
5760
5761 rtx
5762 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5763 int ignore)
5764 {
5765 tree fndecl = get_callee_fndecl (exp);
5766 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5767 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5768 int flags;
5769
5770 /* When ASan is enabled, we don't want to expand some memory/string
5771 builtins and rely on libsanitizer's hooks. This allows us to avoid
5772 redundant checks and be sure that a possible overflow will be detected
5773 by ASan. */
5774
5775 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5776 return expand_call (exp, target, ignore);
5777
5778 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5779 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5780
5781 /* When not optimizing, generate calls to library functions for a certain
5782 set of builtins. */
5783 if (!optimize
5784 && !called_as_built_in (fndecl)
5785 && fcode != BUILT_IN_FORK
5786 && fcode != BUILT_IN_EXECL
5787 && fcode != BUILT_IN_EXECV
5788 && fcode != BUILT_IN_EXECLP
5789 && fcode != BUILT_IN_EXECLE
5790 && fcode != BUILT_IN_EXECVP
5791 && fcode != BUILT_IN_EXECVE
5792 && fcode != BUILT_IN_ALLOCA
5793 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5794 && fcode != BUILT_IN_FREE)
5795 return expand_call (exp, target, ignore);
5796
5797 /* The built-in function expanders test for target == const0_rtx
5798 to determine whether the function's result will be ignored. */
5799 if (ignore)
5800 target = const0_rtx;
5801
5802 /* If the result of a pure or const built-in function is ignored, and
5803 none of its arguments are volatile, we can avoid expanding the
5804 built-in call and just evaluate the arguments for side-effects. */
5805 if (target == const0_rtx
5806 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5807 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5808 {
5809 bool volatilep = false;
5810 tree arg;
5811 call_expr_arg_iterator iter;
5812
5813 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5814 if (TREE_THIS_VOLATILE (arg))
5815 {
5816 volatilep = true;
5817 break;
5818 }
5819
5820 if (! volatilep)
5821 {
5822 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5823 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5824 return const0_rtx;
5825 }
5826 }
5827
5828 switch (fcode)
5829 {
5830 CASE_FLT_FN (BUILT_IN_FABS):
5831 case BUILT_IN_FABSD32:
5832 case BUILT_IN_FABSD64:
5833 case BUILT_IN_FABSD128:
5834 target = expand_builtin_fabs (exp, target, subtarget);
5835 if (target)
5836 return target;
5837 break;
5838
5839 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5840 target = expand_builtin_copysign (exp, target, subtarget);
5841 if (target)
5842 return target;
5843 break;
5844
5845 /* Just do a normal library call if we were unable to fold
5846 the values. */
5847 CASE_FLT_FN (BUILT_IN_CABS):
5848 break;
5849
5850 CASE_FLT_FN (BUILT_IN_EXP):
5851 CASE_FLT_FN (BUILT_IN_EXP10):
5852 CASE_FLT_FN (BUILT_IN_POW10):
5853 CASE_FLT_FN (BUILT_IN_EXP2):
5854 CASE_FLT_FN (BUILT_IN_EXPM1):
5855 CASE_FLT_FN (BUILT_IN_LOGB):
5856 CASE_FLT_FN (BUILT_IN_LOG):
5857 CASE_FLT_FN (BUILT_IN_LOG10):
5858 CASE_FLT_FN (BUILT_IN_LOG2):
5859 CASE_FLT_FN (BUILT_IN_LOG1P):
5860 CASE_FLT_FN (BUILT_IN_TAN):
5861 CASE_FLT_FN (BUILT_IN_ASIN):
5862 CASE_FLT_FN (BUILT_IN_ACOS):
5863 CASE_FLT_FN (BUILT_IN_ATAN):
5864 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5865 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5866 because of possible accuracy problems. */
5867 if (! flag_unsafe_math_optimizations)
5868 break;
5869 CASE_FLT_FN (BUILT_IN_SQRT):
5870 CASE_FLT_FN (BUILT_IN_FLOOR):
5871 CASE_FLT_FN (BUILT_IN_CEIL):
5872 CASE_FLT_FN (BUILT_IN_TRUNC):
5873 CASE_FLT_FN (BUILT_IN_ROUND):
5874 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5875 CASE_FLT_FN (BUILT_IN_RINT):
5876 target = expand_builtin_mathfn (exp, target, subtarget);
5877 if (target)
5878 return target;
5879 break;
5880
5881 CASE_FLT_FN (BUILT_IN_FMA):
5882 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5883 if (target)
5884 return target;
5885 break;
5886
5887 CASE_FLT_FN (BUILT_IN_ILOGB):
5888 if (! flag_unsafe_math_optimizations)
5889 break;
5890 CASE_FLT_FN (BUILT_IN_ISINF):
5891 CASE_FLT_FN (BUILT_IN_FINITE):
5892 case BUILT_IN_ISFINITE:
5893 case BUILT_IN_ISNORMAL:
5894 target = expand_builtin_interclass_mathfn (exp, target);
5895 if (target)
5896 return target;
5897 break;
5898
5899 CASE_FLT_FN (BUILT_IN_ICEIL):
5900 CASE_FLT_FN (BUILT_IN_LCEIL):
5901 CASE_FLT_FN (BUILT_IN_LLCEIL):
5902 CASE_FLT_FN (BUILT_IN_LFLOOR):
5903 CASE_FLT_FN (BUILT_IN_IFLOOR):
5904 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5905 target = expand_builtin_int_roundingfn (exp, target);
5906 if (target)
5907 return target;
5908 break;
5909
5910 CASE_FLT_FN (BUILT_IN_IRINT):
5911 CASE_FLT_FN (BUILT_IN_LRINT):
5912 CASE_FLT_FN (BUILT_IN_LLRINT):
5913 CASE_FLT_FN (BUILT_IN_IROUND):
5914 CASE_FLT_FN (BUILT_IN_LROUND):
5915 CASE_FLT_FN (BUILT_IN_LLROUND):
5916 target = expand_builtin_int_roundingfn_2 (exp, target);
5917 if (target)
5918 return target;
5919 break;
5920
5921 CASE_FLT_FN (BUILT_IN_POWI):
5922 target = expand_builtin_powi (exp, target);
5923 if (target)
5924 return target;
5925 break;
5926
5927 CASE_FLT_FN (BUILT_IN_ATAN2):
5928 CASE_FLT_FN (BUILT_IN_LDEXP):
5929 CASE_FLT_FN (BUILT_IN_SCALB):
5930 CASE_FLT_FN (BUILT_IN_SCALBN):
5931 CASE_FLT_FN (BUILT_IN_SCALBLN):
5932 if (! flag_unsafe_math_optimizations)
5933 break;
5934
5935 CASE_FLT_FN (BUILT_IN_FMOD):
5936 CASE_FLT_FN (BUILT_IN_REMAINDER):
5937 CASE_FLT_FN (BUILT_IN_DREM):
5938 CASE_FLT_FN (BUILT_IN_POW):
5939 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5940 if (target)
5941 return target;
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_CEXPI):
5945 target = expand_builtin_cexpi (exp, target);
5946 gcc_assert (target);
5947 return target;
5948
5949 CASE_FLT_FN (BUILT_IN_SIN):
5950 CASE_FLT_FN (BUILT_IN_COS):
5951 if (! flag_unsafe_math_optimizations)
5952 break;
5953 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5954 if (target)
5955 return target;
5956 break;
5957
5958 CASE_FLT_FN (BUILT_IN_SINCOS):
5959 if (! flag_unsafe_math_optimizations)
5960 break;
5961 target = expand_builtin_sincos (exp);
5962 if (target)
5963 return target;
5964 break;
5965
5966 case BUILT_IN_APPLY_ARGS:
5967 return expand_builtin_apply_args ();
5968
5969 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5970 FUNCTION with a copy of the parameters described by
5971 ARGUMENTS, and ARGSIZE. It returns a block of memory
5972 allocated on the stack into which is stored all the registers
5973 that might possibly be used for returning the result of a
5974 function. ARGUMENTS is the value returned by
5975 __builtin_apply_args. ARGSIZE is the number of bytes of
5976 arguments that must be copied. ??? How should this value be
5977 computed? We'll also need a safe worst case value for varargs
5978 functions. */
5979 case BUILT_IN_APPLY:
5980 if (!validate_arglist (exp, POINTER_TYPE,
5981 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5982 && !validate_arglist (exp, REFERENCE_TYPE,
5983 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5984 return const0_rtx;
5985 else
5986 {
5987 rtx ops[3];
5988
5989 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5990 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5991 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5992
5993 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5994 }
5995
5996 /* __builtin_return (RESULT) causes the function to return the
5997 value described by RESULT. RESULT is address of the block of
5998 memory returned by __builtin_apply. */
5999 case BUILT_IN_RETURN:
6000 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6001 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6002 return const0_rtx;
6003
6004 case BUILT_IN_SAVEREGS:
6005 return expand_builtin_saveregs ();
6006
6007 case BUILT_IN_VA_ARG_PACK:
6008 /* All valid uses of __builtin_va_arg_pack () are removed during
6009 inlining. */
6010 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6011 return const0_rtx;
6012
6013 case BUILT_IN_VA_ARG_PACK_LEN:
6014 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6015 inlining. */
6016 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6017 return const0_rtx;
6018
6019 /* Return the address of the first anonymous stack arg. */
6020 case BUILT_IN_NEXT_ARG:
6021 if (fold_builtin_next_arg (exp, false))
6022 return const0_rtx;
6023 return expand_builtin_next_arg ();
6024
6025 case BUILT_IN_CLEAR_CACHE:
6026 target = expand_builtin___clear_cache (exp);
6027 if (target)
6028 return target;
6029 break;
6030
6031 case BUILT_IN_CLASSIFY_TYPE:
6032 return expand_builtin_classify_type (exp);
6033
6034 case BUILT_IN_CONSTANT_P:
6035 return const0_rtx;
6036
6037 case BUILT_IN_FRAME_ADDRESS:
6038 case BUILT_IN_RETURN_ADDRESS:
6039 return expand_builtin_frame_address (fndecl, exp);
6040
6041 /* Returns the address of the area where the structure is returned.
6042 0 otherwise. */
6043 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6044 if (call_expr_nargs (exp) != 0
6045 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6046 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6047 return const0_rtx;
6048 else
6049 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6050
6051 case BUILT_IN_ALLOCA:
6052 case BUILT_IN_ALLOCA_WITH_ALIGN:
6053 /* If the allocation stems from the declaration of a variable-sized
6054 object, it cannot accumulate. */
6055 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6056 if (target)
6057 return target;
6058 break;
6059
6060 case BUILT_IN_STACK_SAVE:
6061 return expand_stack_save ();
6062
6063 case BUILT_IN_STACK_RESTORE:
6064 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6065 return const0_rtx;
6066
6067 case BUILT_IN_BSWAP16:
6068 case BUILT_IN_BSWAP32:
6069 case BUILT_IN_BSWAP64:
6070 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_INT_FN (BUILT_IN_FFS):
6076 target = expand_builtin_unop (target_mode, exp, target,
6077 subtarget, ffs_optab);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_INT_FN (BUILT_IN_CLZ):
6083 target = expand_builtin_unop (target_mode, exp, target,
6084 subtarget, clz_optab);
6085 if (target)
6086 return target;
6087 break;
6088
6089 CASE_INT_FN (BUILT_IN_CTZ):
6090 target = expand_builtin_unop (target_mode, exp, target,
6091 subtarget, ctz_optab);
6092 if (target)
6093 return target;
6094 break;
6095
6096 CASE_INT_FN (BUILT_IN_CLRSB):
6097 target = expand_builtin_unop (target_mode, exp, target,
6098 subtarget, clrsb_optab);
6099 if (target)
6100 return target;
6101 break;
6102
6103 CASE_INT_FN (BUILT_IN_POPCOUNT):
6104 target = expand_builtin_unop (target_mode, exp, target,
6105 subtarget, popcount_optab);
6106 if (target)
6107 return target;
6108 break;
6109
6110 CASE_INT_FN (BUILT_IN_PARITY):
6111 target = expand_builtin_unop (target_mode, exp, target,
6112 subtarget, parity_optab);
6113 if (target)
6114 return target;
6115 break;
6116
6117 case BUILT_IN_STRLEN:
6118 target = expand_builtin_strlen (exp, target, target_mode);
6119 if (target)
6120 return target;
6121 break;
6122
6123 case BUILT_IN_STRCPY:
6124 target = expand_builtin_strcpy (exp, target);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_STRNCPY:
6130 target = expand_builtin_strncpy (exp, target);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_STPCPY:
6136 target = expand_builtin_stpcpy (exp, target, mode);
6137 if (target)
6138 return target;
6139 break;
6140
6141 case BUILT_IN_MEMCPY:
6142 target = expand_builtin_memcpy (exp, target);
6143 if (target)
6144 return target;
6145 break;
6146
6147 case BUILT_IN_MEMPCPY:
6148 target = expand_builtin_mempcpy (exp, target, mode);
6149 if (target)
6150 return target;
6151 break;
6152
6153 case BUILT_IN_MEMSET:
6154 target = expand_builtin_memset (exp, target, mode);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_BZERO:
6160 target = expand_builtin_bzero (exp);
6161 if (target)
6162 return target;
6163 break;
6164
6165 case BUILT_IN_STRCMP:
6166 target = expand_builtin_strcmp (exp, target);
6167 if (target)
6168 return target;
6169 break;
6170
6171 case BUILT_IN_STRNCMP:
6172 target = expand_builtin_strncmp (exp, target, mode);
6173 if (target)
6174 return target;
6175 break;
6176
6177 case BUILT_IN_BCMP:
6178 case BUILT_IN_MEMCMP:
6179 target = expand_builtin_memcmp (exp, target, mode);
6180 if (target)
6181 return target;
6182 break;
6183
6184 case BUILT_IN_SETJMP:
6185 /* This should have been lowered to the builtins below. */
6186 gcc_unreachable ();
6187
6188 case BUILT_IN_SETJMP_SETUP:
6189 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6190 and the receiver label. */
6191 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6192 {
6193 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6194 VOIDmode, EXPAND_NORMAL);
6195 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6196 rtx label_r = label_rtx (label);
6197
6198 /* This is copied from the handling of non-local gotos. */
6199 expand_builtin_setjmp_setup (buf_addr, label_r);
6200 nonlocal_goto_handler_labels
6201 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6202 nonlocal_goto_handler_labels);
6203 /* ??? Do not let expand_label treat us as such since we would
6204 not want to be both on the list of non-local labels and on
6205 the list of forced labels. */
6206 FORCED_LABEL (label) = 0;
6207 return const0_rtx;
6208 }
6209 break;
6210
6211 case BUILT_IN_SETJMP_RECEIVER:
6212 /* __builtin_setjmp_receiver is passed the receiver label. */
6213 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6214 {
6215 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6216 rtx label_r = label_rtx (label);
6217
6218 expand_builtin_setjmp_receiver (label_r);
6219 return const0_rtx;
6220 }
6221 break;
6222
6223 /* __builtin_longjmp is passed a pointer to an array of five words.
6224 It's similar to the C library longjmp function but works with
6225 __builtin_setjmp above. */
6226 case BUILT_IN_LONGJMP:
6227 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6228 {
6229 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6230 VOIDmode, EXPAND_NORMAL);
6231 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6232
6233 if (value != const1_rtx)
6234 {
6235 error ("%<__builtin_longjmp%> second argument must be 1");
6236 return const0_rtx;
6237 }
6238
6239 expand_builtin_longjmp (buf_addr, value);
6240 return const0_rtx;
6241 }
6242 break;
6243
6244 case BUILT_IN_NONLOCAL_GOTO:
6245 target = expand_builtin_nonlocal_goto (exp);
6246 if (target)
6247 return target;
6248 break;
6249
6250 /* This updates the setjmp buffer that is its argument with the value
6251 of the current stack pointer. */
6252 case BUILT_IN_UPDATE_SETJMP_BUF:
6253 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6254 {
6255 rtx buf_addr
6256 = expand_normal (CALL_EXPR_ARG (exp, 0));
6257
6258 expand_builtin_update_setjmp_buf (buf_addr);
6259 return const0_rtx;
6260 }
6261 break;
6262
6263 case BUILT_IN_TRAP:
6264 expand_builtin_trap ();
6265 return const0_rtx;
6266
6267 case BUILT_IN_UNREACHABLE:
6268 expand_builtin_unreachable ();
6269 return const0_rtx;
6270
6271 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6272 case BUILT_IN_SIGNBITD32:
6273 case BUILT_IN_SIGNBITD64:
6274 case BUILT_IN_SIGNBITD128:
6275 target = expand_builtin_signbit (exp, target);
6276 if (target)
6277 return target;
6278 break;
6279
6280 /* Various hooks for the DWARF 2 __throw routine. */
6281 case BUILT_IN_UNWIND_INIT:
6282 expand_builtin_unwind_init ();
6283 return const0_rtx;
6284 case BUILT_IN_DWARF_CFA:
6285 return virtual_cfa_rtx;
6286 #ifdef DWARF2_UNWIND_INFO
6287 case BUILT_IN_DWARF_SP_COLUMN:
6288 return expand_builtin_dwarf_sp_column ();
6289 case BUILT_IN_INIT_DWARF_REG_SIZES:
6290 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6291 return const0_rtx;
6292 #endif
6293 case BUILT_IN_FROB_RETURN_ADDR:
6294 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6295 case BUILT_IN_EXTRACT_RETURN_ADDR:
6296 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6297 case BUILT_IN_EH_RETURN:
6298 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6299 CALL_EXPR_ARG (exp, 1));
6300 return const0_rtx;
6301 #ifdef EH_RETURN_DATA_REGNO
6302 case BUILT_IN_EH_RETURN_DATA_REGNO:
6303 return expand_builtin_eh_return_data_regno (exp);
6304 #endif
6305 case BUILT_IN_EXTEND_POINTER:
6306 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6307 case BUILT_IN_EH_POINTER:
6308 return expand_builtin_eh_pointer (exp);
6309 case BUILT_IN_EH_FILTER:
6310 return expand_builtin_eh_filter (exp);
6311 case BUILT_IN_EH_COPY_VALUES:
6312 return expand_builtin_eh_copy_values (exp);
6313
6314 case BUILT_IN_VA_START:
6315 return expand_builtin_va_start (exp);
6316 case BUILT_IN_VA_END:
6317 return expand_builtin_va_end (exp);
6318 case BUILT_IN_VA_COPY:
6319 return expand_builtin_va_copy (exp);
6320 case BUILT_IN_EXPECT:
6321 return expand_builtin_expect (exp, target);
6322 case BUILT_IN_ASSUME_ALIGNED:
6323 return expand_builtin_assume_aligned (exp, target);
6324 case BUILT_IN_PREFETCH:
6325 expand_builtin_prefetch (exp);
6326 return const0_rtx;
6327
6328 case BUILT_IN_INIT_TRAMPOLINE:
6329 return expand_builtin_init_trampoline (exp, true);
6330 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6331 return expand_builtin_init_trampoline (exp, false);
6332 case BUILT_IN_ADJUST_TRAMPOLINE:
6333 return expand_builtin_adjust_trampoline (exp);
6334
6335 case BUILT_IN_FORK:
6336 case BUILT_IN_EXECL:
6337 case BUILT_IN_EXECV:
6338 case BUILT_IN_EXECLP:
6339 case BUILT_IN_EXECLE:
6340 case BUILT_IN_EXECVP:
6341 case BUILT_IN_EXECVE:
6342 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6343 if (target)
6344 return target;
6345 break;
6346
6347 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6352 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6353 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6354 if (target)
6355 return target;
6356 break;
6357
6358 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6364 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6370 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6375 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6381 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6385 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6386 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6396 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6397 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6398 if (target)
6399 return target;
6400 break;
6401
6402 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6407 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6408 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6409 if (target)
6410 return target;
6411 break;
6412
6413 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6419 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6420 if (target)
6421 return target;
6422 break;
6423
6424 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6430 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6431 if (target)
6432 return target;
6433 break;
6434
6435 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6436 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6441 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6447 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6452 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6453 if (target)
6454 return target;
6455 break;
6456
6457 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6462 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6463 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6464 if (target)
6465 return target;
6466 break;
6467
6468 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6473 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6474 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6475 if (target)
6476 return target;
6477 break;
6478
6479 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6484 if (mode == VOIDmode)
6485 mode = TYPE_MODE (boolean_type_node);
6486 if (!target || !register_operand (target, mode))
6487 target = gen_reg_rtx (mode);
6488
6489 mode = get_builtin_sync_mode
6490 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6491 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6492 if (target)
6493 return target;
6494 break;
6495
6496 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6501 mode = get_builtin_sync_mode
6502 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6503 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6514 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6515 if (target)
6516 return target;
6517 break;
6518
6519 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6520 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6525 expand_builtin_sync_lock_release (mode, exp);
6526 return const0_rtx;
6527
6528 case BUILT_IN_SYNC_SYNCHRONIZE:
6529 expand_builtin_sync_synchronize ();
6530 return const0_rtx;
6531
6532 case BUILT_IN_ATOMIC_EXCHANGE_1:
6533 case BUILT_IN_ATOMIC_EXCHANGE_2:
6534 case BUILT_IN_ATOMIC_EXCHANGE_4:
6535 case BUILT_IN_ATOMIC_EXCHANGE_8:
6536 case BUILT_IN_ATOMIC_EXCHANGE_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6538 target = expand_builtin_atomic_exchange (mode, exp, target);
6539 if (target)
6540 return target;
6541 break;
6542
6543 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6548 {
6549 unsigned int nargs, z;
6550 vec<tree, va_gc> *vec;
6551
6552 mode =
6553 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6554 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6555 if (target)
6556 return target;
6557
6558 /* If this is turned into an external library call, the weak parameter
6559 must be dropped to match the expected parameter list. */
6560 nargs = call_expr_nargs (exp);
6561 vec_alloc (vec, nargs - 1);
6562 for (z = 0; z < 3; z++)
6563 vec->quick_push (CALL_EXPR_ARG (exp, z));
6564 /* Skip the boolean weak parameter. */
6565 for (z = 4; z < 6; z++)
6566 vec->quick_push (CALL_EXPR_ARG (exp, z));
6567 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6568 break;
6569 }
6570
6571 case BUILT_IN_ATOMIC_LOAD_1:
6572 case BUILT_IN_ATOMIC_LOAD_2:
6573 case BUILT_IN_ATOMIC_LOAD_4:
6574 case BUILT_IN_ATOMIC_LOAD_8:
6575 case BUILT_IN_ATOMIC_LOAD_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6577 target = expand_builtin_atomic_load (mode, exp, target);
6578 if (target)
6579 return target;
6580 break;
6581
6582 case BUILT_IN_ATOMIC_STORE_1:
6583 case BUILT_IN_ATOMIC_STORE_2:
6584 case BUILT_IN_ATOMIC_STORE_4:
6585 case BUILT_IN_ATOMIC_STORE_8:
6586 case BUILT_IN_ATOMIC_STORE_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6588 target = expand_builtin_atomic_store (mode, exp);
6589 if (target)
6590 return const0_rtx;
6591 break;
6592
6593 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6594 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6598 {
6599 enum built_in_function lib;
6600 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6601 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6602 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6603 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6604 ignore, lib);
6605 if (target)
6606 return target;
6607 break;
6608 }
6609 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6610 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6614 {
6615 enum built_in_function lib;
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6617 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6618 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6619 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6620 ignore, lib);
6621 if (target)
6622 return target;
6623 break;
6624 }
6625 case BUILT_IN_ATOMIC_AND_FETCH_1:
6626 case BUILT_IN_ATOMIC_AND_FETCH_2:
6627 case BUILT_IN_ATOMIC_AND_FETCH_4:
6628 case BUILT_IN_ATOMIC_AND_FETCH_8:
6629 case BUILT_IN_ATOMIC_AND_FETCH_16:
6630 {
6631 enum built_in_function lib;
6632 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6633 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6634 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6635 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6636 ignore, lib);
6637 if (target)
6638 return target;
6639 break;
6640 }
6641 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6642 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6646 {
6647 enum built_in_function lib;
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6649 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6650 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6651 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6652 ignore, lib);
6653 if (target)
6654 return target;
6655 break;
6656 }
6657 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6658 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6662 {
6663 enum built_in_function lib;
6664 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6665 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6666 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6667 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6668 ignore, lib);
6669 if (target)
6670 return target;
6671 break;
6672 }
6673 case BUILT_IN_ATOMIC_OR_FETCH_1:
6674 case BUILT_IN_ATOMIC_OR_FETCH_2:
6675 case BUILT_IN_ATOMIC_OR_FETCH_4:
6676 case BUILT_IN_ATOMIC_OR_FETCH_8:
6677 case BUILT_IN_ATOMIC_OR_FETCH_16:
6678 {
6679 enum built_in_function lib;
6680 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6681 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6682 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6683 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6684 ignore, lib);
6685 if (target)
6686 return target;
6687 break;
6688 }
6689 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6690 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6696 ignore, BUILT_IN_NONE);
6697 if (target)
6698 return target;
6699 break;
6700
6701 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6702 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6708 ignore, BUILT_IN_NONE);
6709 if (target)
6710 return target;
6711 break;
6712
6713 case BUILT_IN_ATOMIC_FETCH_AND_1:
6714 case BUILT_IN_ATOMIC_FETCH_AND_2:
6715 case BUILT_IN_ATOMIC_FETCH_AND_4:
6716 case BUILT_IN_ATOMIC_FETCH_AND_8:
6717 case BUILT_IN_ATOMIC_FETCH_AND_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6724
6725 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6726 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6736
6737 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6738 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6748
6749 case BUILT_IN_ATOMIC_FETCH_OR_1:
6750 case BUILT_IN_ATOMIC_FETCH_OR_2:
6751 case BUILT_IN_ATOMIC_FETCH_OR_4:
6752 case BUILT_IN_ATOMIC_FETCH_OR_8:
6753 case BUILT_IN_ATOMIC_FETCH_OR_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6756 ignore, BUILT_IN_NONE);
6757 if (target)
6758 return target;
6759 break;
6760
6761 case BUILT_IN_ATOMIC_TEST_AND_SET:
6762 return expand_builtin_atomic_test_and_set (exp, target);
6763
6764 case BUILT_IN_ATOMIC_CLEAR:
6765 return expand_builtin_atomic_clear (exp);
6766
6767 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6768 return expand_builtin_atomic_always_lock_free (exp);
6769
6770 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6771 target = expand_builtin_atomic_is_lock_free (exp);
6772 if (target)
6773 return target;
6774 break;
6775
6776 case BUILT_IN_ATOMIC_THREAD_FENCE:
6777 expand_builtin_atomic_thread_fence (exp);
6778 return const0_rtx;
6779
6780 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6781 expand_builtin_atomic_signal_fence (exp);
6782 return const0_rtx;
6783
6784 case BUILT_IN_OBJECT_SIZE:
6785 return expand_builtin_object_size (exp);
6786
6787 case BUILT_IN_MEMCPY_CHK:
6788 case BUILT_IN_MEMPCPY_CHK:
6789 case BUILT_IN_MEMMOVE_CHK:
6790 case BUILT_IN_MEMSET_CHK:
6791 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6792 if (target)
6793 return target;
6794 break;
6795
6796 case BUILT_IN_STRCPY_CHK:
6797 case BUILT_IN_STPCPY_CHK:
6798 case BUILT_IN_STRNCPY_CHK:
6799 case BUILT_IN_STPNCPY_CHK:
6800 case BUILT_IN_STRCAT_CHK:
6801 case BUILT_IN_STRNCAT_CHK:
6802 case BUILT_IN_SNPRINTF_CHK:
6803 case BUILT_IN_VSNPRINTF_CHK:
6804 maybe_emit_chk_warning (exp, fcode);
6805 break;
6806
6807 case BUILT_IN_SPRINTF_CHK:
6808 case BUILT_IN_VSPRINTF_CHK:
6809 maybe_emit_sprintf_chk_warning (exp, fcode);
6810 break;
6811
6812 case BUILT_IN_FREE:
6813 if (warn_free_nonheap_object)
6814 maybe_emit_free_warning (exp);
6815 break;
6816
6817 case BUILT_IN_THREAD_POINTER:
6818 return expand_builtin_thread_pointer (exp, target);
6819
6820 case BUILT_IN_SET_THREAD_POINTER:
6821 expand_builtin_set_thread_pointer (exp);
6822 return const0_rtx;
6823
6824 case BUILT_IN_CILK_DETACH:
6825 expand_builtin_cilk_detach (exp);
6826 return const0_rtx;
6827
6828 case BUILT_IN_CILK_POP_FRAME:
6829 expand_builtin_cilk_pop_frame (exp);
6830 return const0_rtx;
6831
6832     default:	/* Just emit a library call if the builtin is unknown.  */
6833 break;
6834 }
6835
6836 /* The switch statement above can drop through to cause the function
6837 to be called normally. */
6838 return expand_call (exp, target, ignore);
6839 }
6840
6841 /* Determine whether a tree node represents a call to a built-in
6842 function. If the tree T is a call to a built-in function with
6843 the right number of arguments of the appropriate types, return
6844 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6845 Otherwise the return value is END_BUILTINS. */
6846
6847 enum built_in_function
6848 builtin_mathfn_code (const_tree t)
6849 {
6850 const_tree fndecl, arg, parmlist;
6851 const_tree argtype, parmtype;
6852 const_call_expr_arg_iterator iter;
6853
6854 if (TREE_CODE (t) != CALL_EXPR
6855 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6856 return END_BUILTINS;
6857
6858 fndecl = get_callee_fndecl (t);
6859 if (fndecl == NULL_TREE
6860 || TREE_CODE (fndecl) != FUNCTION_DECL
6861 || ! DECL_BUILT_IN (fndecl)
6862 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6863 return END_BUILTINS;
6864
6865 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6866 init_const_call_expr_arg_iterator (t, &iter);
6867 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6868 {
6869 /* If a function doesn't take a variable number of arguments,
6870 the last element in the list will have type `void'. */
6871 parmtype = TREE_VALUE (parmlist);
6872 if (VOID_TYPE_P (parmtype))
6873 {
6874 if (more_const_call_expr_args_p (&iter))
6875 return END_BUILTINS;
6876 return DECL_FUNCTION_CODE (fndecl);
6877 }
6878
6879 if (! more_const_call_expr_args_p (&iter))
6880 return END_BUILTINS;
6881
6882 arg = next_const_call_expr_arg (&iter);
6883 argtype = TREE_TYPE (arg);
6884
6885 if (SCALAR_FLOAT_TYPE_P (parmtype))
6886 {
6887 if (! SCALAR_FLOAT_TYPE_P (argtype))
6888 return END_BUILTINS;
6889 }
6890 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6891 {
6892 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6893 return END_BUILTINS;
6894 }
6895 else if (POINTER_TYPE_P (parmtype))
6896 {
6897 if (! POINTER_TYPE_P (argtype))
6898 return END_BUILTINS;
6899 }
6900 else if (INTEGRAL_TYPE_P (parmtype))
6901 {
6902 if (! INTEGRAL_TYPE_P (argtype))
6903 return END_BUILTINS;
6904 }
6905 else
6906 return END_BUILTINS;
6907 }
6908
6909 /* Variable-length argument list. */
6910 return DECL_FUNCTION_CODE (fndecl);
6911 }
6912
6913 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6914 evaluate to a constant. */
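/* A sketch of the source-level behavior implemented here:

     __builtin_constant_p (42)     -> 1   (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  -> 1   (address of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (side effects)

   A NULL_TREE result means "not decidable yet"; the final answer is
   then left to later folding passes.  */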
6915
6916 static tree
6917 fold_builtin_constant_p (tree arg)
6918 {
6919 /* We return 1 for a numeric type that's known to be a constant
6920 value at compile-time or for an aggregate type that's a
6921 literal constant. */
6922 STRIP_NOPS (arg);
6923
6924   /* If we know this is a constant, return the integer constant one.  */
6925 if (CONSTANT_CLASS_P (arg)
6926 || (TREE_CODE (arg) == CONSTRUCTOR
6927 && TREE_CONSTANT (arg)))
6928 return integer_one_node;
6929 if (TREE_CODE (arg) == ADDR_EXPR)
6930 {
6931 tree op = TREE_OPERAND (arg, 0);
6932 if (TREE_CODE (op) == STRING_CST
6933 || (TREE_CODE (op) == ARRAY_REF
6934 && integer_zerop (TREE_OPERAND (op, 1))
6935 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6936 return integer_one_node;
6937 }
6938
6939 /* If this expression has side effects, show we don't know it to be a
6940      constant.  Likewise if it's a pointer or aggregate type, since in
6941      those cases we only want literals; those are only optimized
6942      when generating RTL, not later.
6943 And finally, if we are compiling an initializer, not code, we
6944 need to return a definite result now; there's not going to be any
6945 more optimization done. */
6946 if (TREE_SIDE_EFFECTS (arg)
6947 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6948 || POINTER_TYPE_P (TREE_TYPE (arg))
6949 || cfun == 0
6950 || folding_initializer
6951 || force_folding_builtin_constant_p)
6952 return integer_zero_node;
6953
6954 return NULL_TREE;
6955 }
6956
6957 /* Create a call to __builtin_expect with PRED and EXPECTED (and an
6958    optional PREDICTOR) as its arguments and return it as a truthvalue.  */
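/* For instance, for PRED == (x > 0) and EXPECTED == 1 this builds,
   roughly, the tree for

     __builtin_expect ((long) (x > 0), 1) != 0

   so the result can be used directly as a boolean condition.  */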
6959
6960 static tree
6961 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6962 tree predictor)
6963 {
6964 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6965
6966 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6967 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6968 ret_type = TREE_TYPE (TREE_TYPE (fn));
6969 pred_type = TREE_VALUE (arg_types);
6970 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6971
6972 pred = fold_convert_loc (loc, pred_type, pred);
6973 expected = fold_convert_loc (loc, expected_type, expected);
6974 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6975 predictor);
6976
6977 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6978 build_int_cst (ret_type, 0));
6979 }
6980
6981 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
6982    Return NULL_TREE if no simplification is possible.  */
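/* For instance, a call such as

     __builtin_expect (a && b, 1)

   is distributed over the short-circuit operator below, yielding
   roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each branch receives its own prediction hint.  */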
6983
6984 tree
6985 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6986 {
6987 tree inner, fndecl, inner_arg0;
6988 enum tree_code code;
6989
6990 /* Distribute the expected value over short-circuiting operators.
6991 See through the cast from truthvalue_type_node to long. */
6992 inner_arg0 = arg0;
6993 while (CONVERT_EXPR_P (inner_arg0)
6994 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6995 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6996 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6997
6998 /* If this is a builtin_expect within a builtin_expect keep the
6999 inner one. See through a comparison against a constant. It
7000    might have been added to create a truthvalue.  */
7001 inner = inner_arg0;
7002
7003 if (COMPARISON_CLASS_P (inner)
7004 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7005 inner = TREE_OPERAND (inner, 0);
7006
7007 if (TREE_CODE (inner) == CALL_EXPR
7008 && (fndecl = get_callee_fndecl (inner))
7009 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7010 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7011 return arg0;
7012
7013 inner = inner_arg0;
7014 code = TREE_CODE (inner);
7015 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7016 {
7017 tree op0 = TREE_OPERAND (inner, 0);
7018 tree op1 = TREE_OPERAND (inner, 1);
7019
7020 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7021 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7022 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7023
7024 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7025 }
7026
7027 /* If the argument isn't invariant then there's nothing else we can do. */
7028 if (!TREE_CONSTANT (inner_arg0))
7029 return NULL_TREE;
7030
7031   /* If we expect that a comparison against the argument will fold to
7032      a constant, return the constant.  In practice, this means a true
7033 constant or the address of a non-weak symbol. */
7034 inner = inner_arg0;
7035 STRIP_NOPS (inner);
7036 if (TREE_CODE (inner) == ADDR_EXPR)
7037 {
7038 do
7039 {
7040 inner = TREE_OPERAND (inner, 0);
7041 }
7042 while (TREE_CODE (inner) == COMPONENT_REF
7043 || TREE_CODE (inner) == ARRAY_REF);
7044 if ((TREE_CODE (inner) == VAR_DECL
7045 || TREE_CODE (inner) == FUNCTION_DECL)
7046 && DECL_WEAK (inner))
7047 return NULL_TREE;
7048 }
7049
7050 /* Otherwise, ARG0 already has the proper type for the return value. */
7051 return arg0;
7052 }
7053
7054 /* Fold a call to __builtin_classify_type with argument ARG. */
7055
7056 static tree
7057 fold_builtin_classify_type (tree arg)
7058 {
7059 if (arg == 0)
7060 return build_int_cst (integer_type_node, no_type_class);
7061
7062 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7063 }
7064
7065 /* Fold a call to __builtin_strlen with argument ARG. */
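/* E.g. strlen ("abc") folds to 3 whenever c_strlen can determine a
   constant length for ARG.  */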
7066
7067 static tree
7068 fold_builtin_strlen (location_t loc, tree type, tree arg)
7069 {
7070 if (!validate_arg (arg, POINTER_TYPE))
7071 return NULL_TREE;
7072 else
7073 {
7074 tree len = c_strlen (arg, 0);
7075
7076 if (len)
7077 return fold_convert_loc (loc, type, len);
7078
7079 return NULL_TREE;
7080 }
7081 }
7082
7083 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7084
7085 static tree
7086 fold_builtin_inf (location_t loc, tree type, int warn)
7087 {
7088 REAL_VALUE_TYPE real;
7089
7090 /* __builtin_inff is intended to be usable to define INFINITY on all
7091 targets. If an infinity is not available, INFINITY expands "to a
7092 positive constant of type float that overflows at translation
7093 time", footnote "In this case, using INFINITY will violate the
7094 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7095 Thus we pedwarn to ensure this constraint violation is
7096 diagnosed. */
7097 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7098 pedwarn (loc, 0, "target format does not support infinity");
7099
7100 real_inf (&real);
7101 return build_real (type, real);
7102 }
7103
7104 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7105
7106 static tree
7107 fold_builtin_nan (tree arg, tree type, int quiet)
7108 {
7109 REAL_VALUE_TYPE real;
7110 const char *str;
7111
7112 if (!validate_arg (arg, POINTER_TYPE))
7113 return NULL_TREE;
7114 str = c_getstr (arg);
7115 if (!str)
7116 return NULL_TREE;
7117
7118 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7119 return NULL_TREE;
7120
7121 return build_real (type, real);
7122 }
7123
7124 /* Return true if the floating point expression T has an integer value.
7125 We also allow +Inf, -Inf and NaN to be considered integer values. */
7126
7127 static bool
7128 integer_valued_real_p (tree t)
7129 {
7130 switch (TREE_CODE (t))
7131 {
7132 case FLOAT_EXPR:
7133 return true;
7134
7135 case ABS_EXPR:
7136 case SAVE_EXPR:
7137 return integer_valued_real_p (TREE_OPERAND (t, 0));
7138
7139 case COMPOUND_EXPR:
7140 case MODIFY_EXPR:
7141 case BIND_EXPR:
7142 return integer_valued_real_p (TREE_OPERAND (t, 1));
7143
7144 case PLUS_EXPR:
7145 case MINUS_EXPR:
7146 case MULT_EXPR:
7147 case MIN_EXPR:
7148 case MAX_EXPR:
7149 return integer_valued_real_p (TREE_OPERAND (t, 0))
7150 && integer_valued_real_p (TREE_OPERAND (t, 1));
7151
7152 case COND_EXPR:
7153 return integer_valued_real_p (TREE_OPERAND (t, 1))
7154 && integer_valued_real_p (TREE_OPERAND (t, 2));
7155
7156 case REAL_CST:
7157 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7158
7159 CASE_CONVERT:
7160 {
7161 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7162 if (TREE_CODE (type) == INTEGER_TYPE)
7163 return true;
7164 if (TREE_CODE (type) == REAL_TYPE)
7165 return integer_valued_real_p (TREE_OPERAND (t, 0));
7166 break;
7167 }
7168
7169 case CALL_EXPR:
7170 switch (builtin_mathfn_code (t))
7171 {
7172 CASE_FLT_FN (BUILT_IN_CEIL):
7173 CASE_FLT_FN (BUILT_IN_FLOOR):
7174 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7175 CASE_FLT_FN (BUILT_IN_RINT):
7176 CASE_FLT_FN (BUILT_IN_ROUND):
7177 CASE_FLT_FN (BUILT_IN_TRUNC):
7178 return true;
7179
7180 CASE_FLT_FN (BUILT_IN_FMIN):
7181 CASE_FLT_FN (BUILT_IN_FMAX):
7182 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7183 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7184
7185 default:
7186 break;
7187 }
7188 break;
7189
7190 default:
7191 break;
7192 }
7193 return false;
7194 }
7195
7196 /* FNDECL is assumed to be a builtin where truncation can be propagated
7197    across (for instance floor((double)f) == (double)floorf (f)).
7198 Do the transformation for a call with argument ARG. */
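/* E.g. with float F, floor ((double) F) is rewritten below as
   (double) floorf (F): widening the argument cannot change the
   rounded result, so the narrower function suffices.  */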
7199
7200 static tree
7201 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7202 {
7203 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7204
7205 if (!validate_arg (arg, REAL_TYPE))
7206 return NULL_TREE;
7207
7208 /* Integer rounding functions are idempotent. */
7209 if (fcode == builtin_mathfn_code (arg))
7210 return arg;
7211
7212 /* If argument is already integer valued, and we don't need to worry
7213 about setting errno, there's no need to perform rounding. */
7214 if (! flag_errno_math && integer_valued_real_p (arg))
7215 return arg;
7216
7217 if (optimize)
7218 {
7219 tree arg0 = strip_float_extensions (arg);
7220 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7221 tree newtype = TREE_TYPE (arg0);
7222 tree decl;
7223
7224 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7225 && (decl = mathfn_built_in (newtype, fcode)))
7226 return fold_convert_loc (loc, ftype,
7227 build_call_expr_loc (loc, decl, 1,
7228 fold_convert_loc (loc,
7229 newtype,
7230 arg0)));
7231 }
7232 return NULL_TREE;
7233 }
7234
7235 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7236 the argument, for instance lround((double)f) -> lroundf (f).
7237 Do the transformation for a call with argument ARG. */
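/* E.g. with float F, lround ((double) F) is rewritten below as
   lroundf (F); since the result is integral, dropping the widening
   cast is always safe.  The same code also canonicalizes the iround
   and llround families to the plain "l" forms when int or long long
   has the same precision as long.  */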
7238
7239 static tree
7240 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7241 {
7242 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7243
7244 if (!validate_arg (arg, REAL_TYPE))
7245 return NULL_TREE;
7246
7247 /* If argument is already integer valued, and we don't need to worry
7248 about setting errno, there's no need to perform rounding. */
7249 if (! flag_errno_math && integer_valued_real_p (arg))
7250 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7251 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7252
7253 if (optimize)
7254 {
7255 tree ftype = TREE_TYPE (arg);
7256 tree arg0 = strip_float_extensions (arg);
7257 tree newtype = TREE_TYPE (arg0);
7258 tree decl;
7259
7260 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7261 && (decl = mathfn_built_in (newtype, fcode)))
7262 return build_call_expr_loc (loc, decl, 1,
7263 fold_convert_loc (loc, newtype, arg0));
7264 }
7265
7266 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7267 sizeof (int) == sizeof (long). */
7268 if (TYPE_PRECISION (integer_type_node)
7269 == TYPE_PRECISION (long_integer_type_node))
7270 {
7271 tree newfn = NULL_TREE;
7272 switch (fcode)
7273 {
7274 CASE_FLT_FN (BUILT_IN_ICEIL):
7275 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7276 break;
7277
7278 CASE_FLT_FN (BUILT_IN_IFLOOR):
7279 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7280 break;
7281
7282 CASE_FLT_FN (BUILT_IN_IROUND):
7283 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7284 break;
7285
7286 CASE_FLT_FN (BUILT_IN_IRINT):
7287 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7288 break;
7289
7290 default:
7291 break;
7292 }
7293
7294 if (newfn)
7295 {
7296 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7297 return fold_convert_loc (loc,
7298 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7299 }
7300 }
7301
7302 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7303 sizeof (long long) == sizeof (long). */
7304 if (TYPE_PRECISION (long_long_integer_type_node)
7305 == TYPE_PRECISION (long_integer_type_node))
7306 {
7307 tree newfn = NULL_TREE;
7308 switch (fcode)
7309 {
7310 CASE_FLT_FN (BUILT_IN_LLCEIL):
7311 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7312 break;
7313
7314 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7315 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7316 break;
7317
7318 CASE_FLT_FN (BUILT_IN_LLROUND):
7319 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7320 break;
7321
7322 CASE_FLT_FN (BUILT_IN_LLRINT):
7323 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7324 break;
7325
7326 default:
7327 break;
7328 }
7329
7330 if (newfn)
7331 {
7332 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7333 return fold_convert_loc (loc,
7334 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7335 }
7336 }
7337
7338 return NULL_TREE;
7339 }
7340
7341 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7342 return type. Return NULL_TREE if no simplification can be made. */
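/* Source-level summary of the cases handled below (the ones needing
   -funsafe-math-optimizations are flagged):

     cabs (x + 0i)   -> fabs (x)
     cabs (0 + yi)   -> fabs (y)
     cabs (x + xi)   -> fabs (x) * sqrt (2)      (unsafe)
     cabs (-z)       -> cabs (z)
     cabs (conj (z)) -> cabs (z)
     cabs (z)        -> sqrt (r*r + i*i)         (unsafe, optimizing for
                                                  speed; r/i are the saved
                                                  real and imaginary parts)  */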
7343
7344 static tree
7345 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7346 {
7347 tree res;
7348
7349 if (!validate_arg (arg, COMPLEX_TYPE)
7350 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7351 return NULL_TREE;
7352
7353 /* Calculate the result when the argument is a constant. */
7354 if (TREE_CODE (arg) == COMPLEX_CST
7355 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7356 type, mpfr_hypot)))
7357 return res;
7358
7359 if (TREE_CODE (arg) == COMPLEX_EXPR)
7360 {
7361 tree real = TREE_OPERAND (arg, 0);
7362 tree imag = TREE_OPERAND (arg, 1);
7363
7364 /* If either part is zero, cabs is fabs of the other. */
7365 if (real_zerop (real))
7366 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7367 if (real_zerop (imag))
7368 return fold_build1_loc (loc, ABS_EXPR, type, real);
7369
7370 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7371 if (flag_unsafe_math_optimizations
7372 && operand_equal_p (real, imag, OEP_PURE_SAME))
7373 {
7374 const REAL_VALUE_TYPE sqrt2_trunc
7375 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7376 STRIP_NOPS (real);
7377 return fold_build2_loc (loc, MULT_EXPR, type,
7378 fold_build1_loc (loc, ABS_EXPR, type, real),
7379 build_real (type, sqrt2_trunc));
7380 }
7381 }
7382
7383 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7384 if (TREE_CODE (arg) == NEGATE_EXPR
7385 || TREE_CODE (arg) == CONJ_EXPR)
7386 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7387
7388 /* Don't do this when optimizing for size. */
7389 if (flag_unsafe_math_optimizations
7390 && optimize && optimize_function_for_speed_p (cfun))
7391 {
7392 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7393
7394 if (sqrtfn != NULL_TREE)
7395 {
7396 tree rpart, ipart, result;
7397
7398 arg = builtin_save_expr (arg);
7399
7400 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7401 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7402
7403 rpart = builtin_save_expr (rpart);
7404 ipart = builtin_save_expr (ipart);
7405
7406 result = fold_build2_loc (loc, PLUS_EXPR, type,
7407 fold_build2_loc (loc, MULT_EXPR, type,
7408 rpart, rpart),
7409 fold_build2_loc (loc, MULT_EXPR, type,
7410 ipart, ipart));
7411
7412 return build_call_expr_loc (loc, sqrtfn, 1, result);
7413 }
7414 }
7415
7416 return NULL_TREE;
7417 }
7418
7419 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7420 complex tree type of the result. If NEG is true, the imaginary
7421 zero is negative. */
7422
7423 static tree
7424 build_complex_cproj (tree type, bool neg)
7425 {
7426 REAL_VALUE_TYPE rinf, rzero = dconst0;
7427
7428 real_inf (&rinf);
7429 rzero.sign = neg;
7430 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7431 build_real (TREE_TYPE (type), rzero));
7432 }
7433
7434 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7435 return type. Return NULL_TREE if no simplification can be made. */
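/* For example:

     cproj (2 + 3i)    -> 2 + 3i     (finite argument, unchanged)
     cproj (inf - 4i)  -> inf - 0i   (imaginary sign preserved)
     cproj (x + inf*i) -> inf + 0i

   i.e. every infinite input projects onto (inf +- 0i).  */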
7436
7437 static tree
7438 fold_builtin_cproj (location_t loc, tree arg, tree type)
7439 {
7440 if (!validate_arg (arg, COMPLEX_TYPE)
7441 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7442 return NULL_TREE;
7443
7444 /* If there are no infinities, return arg. */
7445 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7446 return non_lvalue_loc (loc, arg);
7447
7448 /* Calculate the result when the argument is a constant. */
7449 if (TREE_CODE (arg) == COMPLEX_CST)
7450 {
7451 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7452 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7453
7454 if (real_isinf (real) || real_isinf (imag))
7455 return build_complex_cproj (type, imag->sign);
7456 else
7457 return arg;
7458 }
7459 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7460 {
7461 tree real = TREE_OPERAND (arg, 0);
7462 tree imag = TREE_OPERAND (arg, 1);
7463
7464 STRIP_NOPS (real);
7465 STRIP_NOPS (imag);
7466
7467 /* If the real part is inf and the imag part is known to be
7468 nonnegative, return (inf + 0i). Remember side-effects are
7469 possible in the imag part. */
7470 if (TREE_CODE (real) == REAL_CST
7471 && real_isinf (TREE_REAL_CST_PTR (real))
7472 && tree_expr_nonnegative_p (imag))
7473 return omit_one_operand_loc (loc, type,
7474 build_complex_cproj (type, false),
7475 arg);
7476
7477 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7478 Remember side-effects are possible in the real part. */
7479 if (TREE_CODE (imag) == REAL_CST
7480 && real_isinf (TREE_REAL_CST_PTR (imag)))
7481 return
7482 omit_one_operand_loc (loc, type,
7483 build_complex_cproj (type, TREE_REAL_CST_PTR
7484 (imag)->sign), arg);
7485 }
7486
7487 return NULL_TREE;
7488 }
7489
7490 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7491 Return NULL_TREE if no simplification can be made. */
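/* Unsafe-math rewrites performed below, in source terms:

     sqrt (expN (x))   -> expN (x * 0.5)
     sqrt (sqrt (x))   -> pow (x, 0.25)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)  */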
7492
7493 static tree
7494 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7495 {
7497 enum built_in_function fcode;
7498 tree res;
7499
7500 if (!validate_arg (arg, REAL_TYPE))
7501 return NULL_TREE;
7502
7503 /* Calculate the result when the argument is a constant. */
7504 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7505 return res;
7506
7507 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7508 fcode = builtin_mathfn_code (arg);
7509 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7510 {
7511 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7512 arg = fold_build2_loc (loc, MULT_EXPR, type,
7513 CALL_EXPR_ARG (arg, 0),
7514 build_real (type, dconsthalf));
7515 return build_call_expr_loc (loc, expfn, 1, arg);
7516 }
7517
7518 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7519 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7520 {
7521 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7522
7523 if (powfn)
7524 {
7525 tree arg0 = CALL_EXPR_ARG (arg, 0);
7526 tree tree_root;
7527 /* The inner root was either sqrt or cbrt. */
7528 /* This was a conditional expression but it triggered a bug
7529 in Sun C 5.5. */
7530 REAL_VALUE_TYPE dconstroot;
7531 if (BUILTIN_SQRT_P (fcode))
7532 dconstroot = dconsthalf;
7533 else
7534 dconstroot = dconst_third ();
7535
7536 /* Adjust for the outer root. */
7537 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7538 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7539 tree_root = build_real (type, dconstroot);
7540 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7541 }
7542 }
7543
7544 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7545 if (flag_unsafe_math_optimizations
7546 && (fcode == BUILT_IN_POW
7547 || fcode == BUILT_IN_POWF
7548 || fcode == BUILT_IN_POWL))
7549 {
7550 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7551 tree arg0 = CALL_EXPR_ARG (arg, 0);
7552 tree arg1 = CALL_EXPR_ARG (arg, 1);
7553 tree narg1;
7554 if (!tree_expr_nonnegative_p (arg0))
7555 arg0 = build1 (ABS_EXPR, type, arg0);
7556 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7557 build_real (type, dconsthalf));
7558 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7559 }
7560
7561 return NULL_TREE;
7562 }
7563
7564 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7565 Return NULL_TREE if no simplification can be made. */
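/* Unsafe-math rewrites performed below, in source terms:

     cbrt (expN (x))   -> expN (x / 3)
     cbrt (sqrt (x))   -> pow (x, 1.0/6.0)
     cbrt (cbrt (x))   -> pow (x, 1.0/9.0)    (x nonnegative)
     cbrt (pow (x, y)) -> pow (x, y / 3)      (x nonnegative)  */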
7566
7567 static tree
7568 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7569 {
7570 const enum built_in_function fcode = builtin_mathfn_code (arg);
7571 tree res;
7572
7573 if (!validate_arg (arg, REAL_TYPE))
7574 return NULL_TREE;
7575
7576 /* Calculate the result when the argument is a constant. */
7577 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7578 return res;
7579
7580 if (flag_unsafe_math_optimizations)
7581 {
7582 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7583 if (BUILTIN_EXPONENT_P (fcode))
7584 {
7585 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7586 const REAL_VALUE_TYPE third_trunc =
7587 real_value_truncate (TYPE_MODE (type), dconst_third ());
7588 arg = fold_build2_loc (loc, MULT_EXPR, type,
7589 CALL_EXPR_ARG (arg, 0),
7590 build_real (type, third_trunc));
7591 return build_call_expr_loc (loc, expfn, 1, arg);
7592 }
7593
7594 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7595 if (BUILTIN_SQRT_P (fcode))
7596 {
7597 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7598
7599 if (powfn)
7600 {
7601 tree arg0 = CALL_EXPR_ARG (arg, 0);
7602 tree tree_root;
7603 REAL_VALUE_TYPE dconstroot = dconst_third ();
7604
7605 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7606 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7607 tree_root = build_real (type, dconstroot);
7608 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7609 }
7610 }
7611
7612 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7613 if (BUILTIN_CBRT_P (fcode))
7614 {
7615 tree arg0 = CALL_EXPR_ARG (arg, 0);
7616 if (tree_expr_nonnegative_p (arg0))
7617 {
7618 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7619
7620 if (powfn)
7621 {
7622 tree tree_root;
7623 REAL_VALUE_TYPE dconstroot;
7624
7625 real_arithmetic (&dconstroot, MULT_EXPR,
7626 dconst_third_ptr (), dconst_third_ptr ());
7627 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7628 tree_root = build_real (type, dconstroot);
7629 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7630 }
7631 }
7632 }
7633
7634 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7635 if (fcode == BUILT_IN_POW
7636 || fcode == BUILT_IN_POWF
7637 || fcode == BUILT_IN_POWL)
7638 {
7639 tree arg00 = CALL_EXPR_ARG (arg, 0);
7640 tree arg01 = CALL_EXPR_ARG (arg, 1);
7641 if (tree_expr_nonnegative_p (arg00))
7642 {
7643 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7644 const REAL_VALUE_TYPE dconstroot
7645 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7646 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7647 build_real (type, dconstroot));
7648 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7649 }
7650 }
7651 }
7652 return NULL_TREE;
7653 }
7654
7655 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7656 TYPE is the type of the return value. Return NULL_TREE if no
7657 simplification can be made. */
7658
7659 static tree
7660 fold_builtin_cos (location_t loc,
7661 tree arg, tree type, tree fndecl)
7662 {
7663 tree res, narg;
7664
7665 if (!validate_arg (arg, REAL_TYPE))
7666 return NULL_TREE;
7667
7668 /* Calculate the result when the argument is a constant. */
7669 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7670 return res;
7671
7672 /* Optimize cos(-x) into cos (x). */
7673 if ((narg = fold_strip_sign_ops (arg)))
7674 return build_call_expr_loc (loc, fndecl, 1, narg);
7675
7676 return NULL_TREE;
7677 }
7678
7679 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7680 Return NULL_TREE if no simplification can be made. */
7681
7682 static tree
7683 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7684 {
7685 if (validate_arg (arg, REAL_TYPE))
7686 {
7687 tree res, narg;
7688
7689 /* Calculate the result when the argument is a constant. */
7690 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7691 return res;
7692
7693 /* Optimize cosh(-x) into cosh (x). */
7694 if ((narg = fold_strip_sign_ops (arg)))
7695 return build_call_expr_loc (loc, fndecl, 1, narg);
7696 }
7697
7698 return NULL_TREE;
7699 }
7700
7701 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7702 argument ARG. TYPE is the type of the return value. Return
7703 NULL_TREE if no simplification can be made. */
7704
7705 static tree
7706 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7707 bool hyper)
7708 {
7709 if (validate_arg (arg, COMPLEX_TYPE)
7710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7711 {
7712 tree tmp;
7713
7714 /* Calculate the result when the argument is a constant. */
7715 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7716 return tmp;
7717
7718 /* Optimize fn(-x) into fn(x). */
7719 if ((tmp = fold_strip_sign_ops (arg)))
7720 return build_call_expr_loc (loc, fndecl, 1, tmp);
7721 }
7722
7723 return NULL_TREE;
7724 }
7725
7726 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7727 Return NULL_TREE if no simplification can be made. */
7728
7729 static tree
7730 fold_builtin_tan (tree arg, tree type)
7731 {
7732 enum built_in_function fcode;
7733 tree res;
7734
7735 if (!validate_arg (arg, REAL_TYPE))
7736 return NULL_TREE;
7737
7738 /* Calculate the result when the argument is a constant. */
7739 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7740 return res;
7741
7742 /* Optimize tan(atan(x)) = x. */
7743 fcode = builtin_mathfn_code (arg);
7744 if (flag_unsafe_math_optimizations
7745 && (fcode == BUILT_IN_ATAN
7746 || fcode == BUILT_IN_ATANF
7747 || fcode == BUILT_IN_ATANL))
7748 return CALL_EXPR_ARG (arg, 0);
7749
7750 return NULL_TREE;
7751 }
7752
7753 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7754 NULL_TREE if no simplification can be made. */
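/* When the target libc has C99 complex math, sincos (x, sinp, cosp)
   is canonicalized below to, roughly,

     tmp = cexpi (x);  *sinp = __imag__ tmp;  *cosp = __real__ tmp;

   which exposes the computation to complex lowering and CSE.  */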
7755
7756 static tree
7757 fold_builtin_sincos (location_t loc,
7758 tree arg0, tree arg1, tree arg2)
7759 {
7760 tree type;
7761 tree res, fn, call;
7762
7763 if (!validate_arg (arg0, REAL_TYPE)
7764 || !validate_arg (arg1, POINTER_TYPE)
7765 || !validate_arg (arg2, POINTER_TYPE))
7766 return NULL_TREE;
7767
7768 type = TREE_TYPE (arg0);
7769
7770 /* Calculate the result when the argument is a constant. */
7771 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7772 return res;
7773
7774 /* Canonicalize sincos to cexpi. */
7775 if (!targetm.libc_has_function (function_c99_math_complex))
7776 return NULL_TREE;
7777 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7778 if (!fn)
7779 return NULL_TREE;
7780
7781 call = build_call_expr_loc (loc, fn, 1, arg0);
7782 call = builtin_save_expr (call);
7783
7784 return build2 (COMPOUND_EXPR, void_type_node,
7785 build2 (MODIFY_EXPR, void_type_node,
7786 build_fold_indirect_ref_loc (loc, arg1),
7787 build1 (IMAGPART_EXPR, type, call)),
7788 build2 (MODIFY_EXPR, void_type_node,
7789 build_fold_indirect_ref_loc (loc, arg2),
7790 build1 (REALPART_EXPR, type, call)));
7791 }
7792
7793 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7794 NULL_TREE if no simplification can be made. */
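/* Rewrites attempted below when cexpi is available:

     cexp (0 + yi) -> cexpi (y)
     cexp (x + yi) -> exp (x) * cexpi (y)    (unsafe math)

   where the multiplication is performed componentwise on the value
   returned by cexpi.  */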
7795
7796 static tree
7797 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7798 {
7799 tree rtype;
7800 tree realp, imagp, ifn;
7801 tree res;
7802
7803 if (!validate_arg (arg0, COMPLEX_TYPE)
7804 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7805 return NULL_TREE;
7806
7807 /* Calculate the result when the argument is a constant. */
7808 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7809 return res;
7810
7811 rtype = TREE_TYPE (TREE_TYPE (arg0));
7812
7813   /* If we can determine the real part of arg0 and it is constant zero,
7814      fold to cexpi.  */
7815 if (!targetm.libc_has_function (function_c99_math_complex))
7816 return NULL_TREE;
7817 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7818 if (!ifn)
7819 return NULL_TREE;
7820
7821 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7822 && real_zerop (realp))
7823 {
7824 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7825 return build_call_expr_loc (loc, ifn, 1, narg);
7826 }
7827
7828   /* If we can easily decompose the real and imaginary parts, split cexp
7829      into exp (r) * cexpi (i).  */
7830 if (flag_unsafe_math_optimizations
7831 && realp)
7832 {
7833 tree rfn, rcall, icall;
7834
7835 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7836 if (!rfn)
7837 return NULL_TREE;
7838
7839 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7840 if (!imagp)
7841 return NULL_TREE;
7842
7843 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7844 icall = builtin_save_expr (icall);
7845 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7846 rcall = builtin_save_expr (rcall);
7847 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7848 fold_build2_loc (loc, MULT_EXPR, rtype,
7849 rcall,
7850 fold_build1_loc (loc, REALPART_EXPR,
7851 rtype, icall)),
7852 fold_build2_loc (loc, MULT_EXPR, rtype,
7853 rcall,
7854 fold_build1_loc (loc, IMAGPART_EXPR,
7855 rtype, icall)));
7856 }
7857
7858 return NULL_TREE;
7859 }
7860
7861 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7862 Return NULL_TREE if no simplification can be made. */
7863
7864 static tree
7865 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7866 {
7867 if (!validate_arg (arg, REAL_TYPE))
7868 return NULL_TREE;
7869
7870 /* Optimize trunc of constant value. */
7871 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7872 {
7873 REAL_VALUE_TYPE r, x;
7874 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7875
7876 x = TREE_REAL_CST (arg);
7877 real_trunc (&r, TYPE_MODE (type), &x);
7878 return build_real (type, r);
7879 }
7880
7881 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7882 }
7883
7884 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7885 Return NULL_TREE if no simplification can be made. */
7886
7887 static tree
7888 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7889 {
7890 if (!validate_arg (arg, REAL_TYPE))
7891 return NULL_TREE;
7892
7893 /* Optimize floor of constant value. */
7894 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7895 {
7896 REAL_VALUE_TYPE x;
7897
7898 x = TREE_REAL_CST (arg);
7899 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7900 {
7901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7902 REAL_VALUE_TYPE r;
7903
7904 real_floor (&r, TYPE_MODE (type), &x);
7905 return build_real (type, r);
7906 }
7907 }
7908
7909 /* Fold floor (x) where x is nonnegative to trunc (x). */
7910 if (tree_expr_nonnegative_p (arg))
7911 {
7912 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7913 if (truncfn)
7914 return build_call_expr_loc (loc, truncfn, 1, arg);
7915 }
7916
7917 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7918 }
7919
7920 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7921 Return NULL_TREE if no simplification can be made. */
7922
7923 static tree
7924 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7925 {
7926 if (!validate_arg (arg, REAL_TYPE))
7927 return NULL_TREE;
7928
7929 /* Optimize ceil of constant value. */
7930 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7931 {
7932 REAL_VALUE_TYPE x;
7933
7934 x = TREE_REAL_CST (arg);
7935 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7936 {
7937 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7938 REAL_VALUE_TYPE r;
7939
7940 real_ceil (&r, TYPE_MODE (type), &x);
7941 return build_real (type, r);
7942 }
7943 }
7944
7945 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7946 }
7947
7948 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7949 Return NULL_TREE if no simplification can be made. */
7950
7951 static tree
7952 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7953 {
7954 if (!validate_arg (arg, REAL_TYPE))
7955 return NULL_TREE;
7956
7957 /* Optimize round of constant value. */
7958 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7959 {
7960 REAL_VALUE_TYPE x;
7961
7962 x = TREE_REAL_CST (arg);
7963 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7964 {
7965 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7966 REAL_VALUE_TYPE r;
7967
7968 real_round (&r, TYPE_MODE (type), &x);
7969 return build_real (type, r);
7970 }
7971 }
7972
7973 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7974 }
7975
7976 /* Fold function call to builtin lround, lroundf or lroundl (or the
7977 corresponding long long versions) and other rounding functions. ARG
7978 is the argument to the call. Return NULL_TREE if no simplification
7979 can be made. */
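/* Constant arguments are evaluated directly, e.g.

     lround (2.5)  -> 3
     lfloor (-1.5) -> -2
     lceil (1.25)  -> 2

   provided the result fits the target integer type; otherwise the
   wide_int conversion below reports failure and nothing is folded.  */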
7980
7981 static tree
7982 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7983 {
7984 if (!validate_arg (arg, REAL_TYPE))
7985 return NULL_TREE;
7986
7987 /* Optimize lround of constant value. */
7988 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7989 {
7990 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7991
7992 if (real_isfinite (&x))
7993 {
7994 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7995 tree ftype = TREE_TYPE (arg);
7996 REAL_VALUE_TYPE r;
7997 bool fail = false;
7998
7999 switch (DECL_FUNCTION_CODE (fndecl))
8000 {
8001 CASE_FLT_FN (BUILT_IN_IFLOOR):
8002 CASE_FLT_FN (BUILT_IN_LFLOOR):
8003 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8004 real_floor (&r, TYPE_MODE (ftype), &x);
8005 break;
8006
8007 CASE_FLT_FN (BUILT_IN_ICEIL):
8008 CASE_FLT_FN (BUILT_IN_LCEIL):
8009 CASE_FLT_FN (BUILT_IN_LLCEIL):
8010 real_ceil (&r, TYPE_MODE (ftype), &x);
8011 break;
8012
8013 CASE_FLT_FN (BUILT_IN_IROUND):
8014 CASE_FLT_FN (BUILT_IN_LROUND):
8015 CASE_FLT_FN (BUILT_IN_LLROUND):
8016 real_round (&r, TYPE_MODE (ftype), &x);
8017 break;
8018
8019 default:
8020 gcc_unreachable ();
8021 }
8022
8023 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8024 if (!fail)
8025 return wide_int_to_tree (itype, val);
8026 }
8027 }
8028
8029 switch (DECL_FUNCTION_CODE (fndecl))
8030 {
8031 CASE_FLT_FN (BUILT_IN_LFLOOR):
8032 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8033 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8034 if (tree_expr_nonnegative_p (arg))
8035 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8036 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8037 break;
8038 default:;
8039 }
8040
8041 return fold_fixed_mathfn (loc, fndecl, arg);
8042 }
8043
8044 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8045    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8046 the argument to the call. Return NULL_TREE if no simplification can
8047 be made. */
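/* Constant-folding examples for the bit operations handled here
   (shown for a 32-bit int):

     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1
     __builtin_ffs (8)         -> 4
     __builtin_clz (1)         -> 31
     __builtin_ctz (8)         -> 3  */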
8048
8049 static tree
8050 fold_builtin_bitop (tree fndecl, tree arg)
8051 {
8052 if (!validate_arg (arg, INTEGER_TYPE))
8053 return NULL_TREE;
8054
8055 /* Optimize for constant argument. */
8056 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8057 {
8058 tree type = TREE_TYPE (arg);
8059 int result;
8060
8061 switch (DECL_FUNCTION_CODE (fndecl))
8062 {
8063 CASE_INT_FN (BUILT_IN_FFS):
8064 result = wi::ffs (arg);
8065 break;
8066
8067 CASE_INT_FN (BUILT_IN_CLZ):
8068 if (wi::ne_p (arg, 0))
8069 result = wi::clz (arg);
8070 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8071 result = TYPE_PRECISION (type);
8072 break;
8073
8074 CASE_INT_FN (BUILT_IN_CTZ):
8075 if (wi::ne_p (arg, 0))
8076 result = wi::ctz (arg);
8077 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8078 result = TYPE_PRECISION (type);
8079 break;
8080
8081 CASE_INT_FN (BUILT_IN_CLRSB):
8082 result = wi::clrsb (arg);
8083 break;
8084
8085 CASE_INT_FN (BUILT_IN_POPCOUNT):
8086 result = wi::popcount (arg);
8087 break;
8088
8089 CASE_INT_FN (BUILT_IN_PARITY):
8090 result = wi::parity (arg);
8091 break;
8092
8093 default:
8094 gcc_unreachable ();
8095 }
8096
8097 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8098 }
8099
8100 return NULL_TREE;
8101 }
8102
8103 /* Fold function call to builtin_bswap and the short, long and long long
8104 variants. Return NULL_TREE if no simplification can be made. */
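/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412 and
   __builtin_bswap16 (0x1234) folds to 0x3412.  */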
8105 static tree
8106 fold_builtin_bswap (tree fndecl, tree arg)
8107 {
8108 if (! validate_arg (arg, INTEGER_TYPE))
8109 return NULL_TREE;
8110
8111 /* Optimize constant value. */
8112 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8113 {
8114 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8115
8116 switch (DECL_FUNCTION_CODE (fndecl))
8117 {
8118 case BUILT_IN_BSWAP16:
8119 case BUILT_IN_BSWAP32:
8120 case BUILT_IN_BSWAP64:
8121 {
8122 signop sgn = TYPE_SIGN (type);
8123 tree result =
8124 wide_int_to_tree (type,
8125 wide_int::from (arg, TYPE_PRECISION (type),
8126 sgn).bswap ());
8127 return result;
8128 }
8129 default:
8130 gcc_unreachable ();
8131 }
8132 }
8133
8134 return NULL_TREE;
8135 }
8136
8137 /* A subroutine of fold_builtin to fold the various logarithmic
8138    functions.  Return NULL_TREE if no simplification can be made.
8139 FUNC is the corresponding MPFR logarithm function. */
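/* Unsafe-math rewrites performed below, in source terms:

     logN (expN (x))   -> x               (matching bases only)
     logN (exp (x))    -> x * logN (e)
     logN (exp2 (x))   -> x * logN (2)
     logN (exp10 (x))  -> x * logN (10)
     logN (sqrt (x))   -> 0.5 * logN (x)
     logN (cbrt (x))   -> (1/3) * logN (x)
     logN (pow (x, y)) -> y * logN (x)  */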
8140
8141 static tree
8142 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8143 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8144 {
8145 if (validate_arg (arg, REAL_TYPE))
8146 {
8147 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8148 tree res;
8149 const enum built_in_function fcode = builtin_mathfn_code (arg);
8150
8151 /* Calculate the result when the argument is a constant. */
8152 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8153 return res;
8154
8155 /* Special case, optimize logN(expN(x)) = x. */
8156 if (flag_unsafe_math_optimizations
8157 && ((func == mpfr_log
8158 && (fcode == BUILT_IN_EXP
8159 || fcode == BUILT_IN_EXPF
8160 || fcode == BUILT_IN_EXPL))
8161 || (func == mpfr_log2
8162 && (fcode == BUILT_IN_EXP2
8163 || fcode == BUILT_IN_EXP2F
8164 || fcode == BUILT_IN_EXP2L))
8165 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8166 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8167
8168 /* Optimize logN(func()) for various exponential functions. We
8169 want to determine the value "x" and the power "exponent" in
8170 order to transform logN(x**exponent) into exponent*logN(x). */
8171 if (flag_unsafe_math_optimizations)
8172 {
8173 tree exponent = 0, x = 0;
8174
8175 switch (fcode)
8176 {
8177 CASE_FLT_FN (BUILT_IN_EXP):
8178 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8179 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8180 dconst_e ()));
8181 exponent = CALL_EXPR_ARG (arg, 0);
8182 break;
8183 CASE_FLT_FN (BUILT_IN_EXP2):
8184 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8185 x = build_real (type, dconst2);
8186 exponent = CALL_EXPR_ARG (arg, 0);
8187 break;
8188 CASE_FLT_FN (BUILT_IN_EXP10):
8189 CASE_FLT_FN (BUILT_IN_POW10):
8190 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8191 {
8192 REAL_VALUE_TYPE dconst10;
8193 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8194 x = build_real (type, dconst10);
8195 }
8196 exponent = CALL_EXPR_ARG (arg, 0);
8197 break;
8198 CASE_FLT_FN (BUILT_IN_SQRT):
8199 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8200 x = CALL_EXPR_ARG (arg, 0);
8201 exponent = build_real (type, dconsthalf);
8202 break;
8203 CASE_FLT_FN (BUILT_IN_CBRT):
8204 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8205 x = CALL_EXPR_ARG (arg, 0);
8206 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8207 dconst_third ()));
8208 break;
8209 CASE_FLT_FN (BUILT_IN_POW):
8210 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8211 x = CALL_EXPR_ARG (arg, 0);
8212 exponent = CALL_EXPR_ARG (arg, 1);
8213 break;
8214 default:
8215 break;
8216 }
8217
8218 /* Now perform the optimization. */
8219 if (x && exponent)
8220 {
8221 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8222 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8223 }
8224 }
8225 }
8226
8227 return NULL_TREE;
8228 }
8229
8230 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8231 NULL_TREE if no simplification can be made. */
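/* For example:

     hypot (x, 0)         -> fabs (x)
     hypot (-x, fabs (y)) -> hypot (x, y)
     hypot (x, x)         -> fabs (x) * sqrt (2)    (unsafe math)  */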
8232
8233 static tree
8234 fold_builtin_hypot (location_t loc, tree fndecl,
8235 tree arg0, tree arg1, tree type)
8236 {
8237 tree res, narg0, narg1;
8238
8239 if (!validate_arg (arg0, REAL_TYPE)
8240 || !validate_arg (arg1, REAL_TYPE))
8241 return NULL_TREE;
8242
8243 /* Calculate the result when the argument is a constant. */
8244 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8245 return res;
8246
8247 /* If either argument to hypot has a negate or abs, strip that off.
8248 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8249 narg0 = fold_strip_sign_ops (arg0);
8250 narg1 = fold_strip_sign_ops (arg1);
8251 if (narg0 || narg1)
8252 {
8253 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8254 narg1 ? narg1 : arg1);
8255 }
8256
8257 /* If either argument is zero, hypot is fabs of the other. */
8258 if (real_zerop (arg0))
8259 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8260 else if (real_zerop (arg1))
8261 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8262
8263 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8264 if (flag_unsafe_math_optimizations
8265 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8266 {
8267 const REAL_VALUE_TYPE sqrt2_trunc
8268 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8269 return fold_build2_loc (loc, MULT_EXPR, type,
8270 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8271 build_real (type, sqrt2_trunc));
8272 }
8273
8274 return NULL_TREE;
8275 }
8276
8277
8278 /* Fold a builtin function call to pow, powf, or powl. Return
8279 NULL_TREE if no simplification can be made. */
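/* Source-level summary of the rewrites below (the ones needing
   -funsafe-math-optimizations are flagged):

     pow (1.0, y)        -> 1.0
     pow (x, 0.0)        -> 1.0
     pow (x, 1.0)        -> x
     pow (x, -1.0)       -> 1.0 / x
     pow (x, 0.5)        -> sqrt (x)          (unsafe)
     pow (x, 1.0/3.0)    -> cbrt (x)          (unsafe)
     pow (expN (x), y)   -> expN (x * y)      (unsafe)
     pow (sqrt (x), y)   -> pow (x, y * 0.5)  (unsafe)
     pow (pow (x, y), z) -> pow (x, y * z)    (unsafe, x nonnegative)  */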
8280 static tree
8281 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8282 {
8283 tree res;
8284
8285 if (!validate_arg (arg0, REAL_TYPE)
8286 || !validate_arg (arg1, REAL_TYPE))
8287 return NULL_TREE;
8288
8289 /* Calculate the result when the argument is a constant. */
8290 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8291 return res;
8292
8293 /* Optimize pow(1.0,y) = 1.0. */
8294 if (real_onep (arg0))
8295 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8296
8297 if (TREE_CODE (arg1) == REAL_CST
8298 && !TREE_OVERFLOW (arg1))
8299 {
8300 REAL_VALUE_TYPE cint;
8301 REAL_VALUE_TYPE c;
8302 HOST_WIDE_INT n;
8303
8304 c = TREE_REAL_CST (arg1);
8305
8306 /* Optimize pow(x,0.0) = 1.0. */
8307 if (REAL_VALUES_EQUAL (c, dconst0))
8308 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8309 arg0);
8310
8311 /* Optimize pow(x,1.0) = x. */
8312 if (REAL_VALUES_EQUAL (c, dconst1))
8313 return arg0;
8314
8315 /* Optimize pow(x,-1.0) = 1.0/x. */
8316 if (REAL_VALUES_EQUAL (c, dconstm1))
8317 return fold_build2_loc (loc, RDIV_EXPR, type,
8318 build_real (type, dconst1), arg0);
8319
8320 /* Optimize pow(x,0.5) = sqrt(x). */
8321 if (flag_unsafe_math_optimizations
8322 && REAL_VALUES_EQUAL (c, dconsthalf))
8323 {
8324 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8325
8326 if (sqrtfn != NULL_TREE)
8327 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8328 }
8329
8330 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8331 if (flag_unsafe_math_optimizations)
8332 {
8333 const REAL_VALUE_TYPE dconstroot
8334 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8335
8336 if (REAL_VALUES_EQUAL (c, dconstroot))
8337 {
8338 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8339 if (cbrtfn != NULL_TREE)
8340 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8341 }
8342 }
8343
8344 /* Check for an integer exponent. */
8345 n = real_to_integer (&c);
8346 real_from_integer (&cint, VOIDmode, n, SIGNED);
8347 if (real_identical (&c, &cint))
8348 {
8349 /* Attempt to evaluate pow at compile-time, unless this should
8350 raise an exception. */
8351 if (TREE_CODE (arg0) == REAL_CST
8352 && !TREE_OVERFLOW (arg0)
8353 && (n > 0
8354 || (!flag_trapping_math && !flag_errno_math)
8355 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8356 {
8357 REAL_VALUE_TYPE x;
8358 bool inexact;
8359
8360 x = TREE_REAL_CST (arg0);
8361 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8362 if (flag_unsafe_math_optimizations || !inexact)
8363 return build_real (type, x);
8364 }
8365
8366 /* Strip sign ops from even integer powers. */
8367 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8368 {
8369 tree narg0 = fold_strip_sign_ops (arg0);
8370 if (narg0)
8371 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8372 }
8373 }
8374 }
8375
8376 if (flag_unsafe_math_optimizations)
8377 {
8378 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8379
8380 /* Optimize pow(expN(x),y) = expN(x*y). */
8381 if (BUILTIN_EXPONENT_P (fcode))
8382 {
8383 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8384 tree arg = CALL_EXPR_ARG (arg0, 0);
8385 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8386 return build_call_expr_loc (loc, expfn, 1, arg);
8387 }
8388
8389 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8390 if (BUILTIN_SQRT_P (fcode))
8391 {
8392 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8393 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8394 build_real (type, dconsthalf));
8395 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8396 }
8397
8398 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8399 if (BUILTIN_CBRT_P (fcode))
8400 {
8401 tree arg = CALL_EXPR_ARG (arg0, 0);
8402 if (tree_expr_nonnegative_p (arg))
8403 {
8404 const REAL_VALUE_TYPE dconstroot
8405 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8406 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8407 build_real (type, dconstroot));
8408 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8409 }
8410 }
8411
8412 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8413 if (fcode == BUILT_IN_POW
8414 || fcode == BUILT_IN_POWF
8415 || fcode == BUILT_IN_POWL)
8416 {
8417 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8418 if (tree_expr_nonnegative_p (arg00))
8419 {
8420 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8421 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8422 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8423 }
8424 }
8425 }
8426
8427 return NULL_TREE;
8428 }
8429
8430 /* Fold a builtin function call to powi, powif, or powil with arguments
8431    ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
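/* Like pow above but with an integer exponent, e.g.

     powi (x, 0)  -> 1.0
     powi (x, 1)  -> x
     powi (x, -1) -> 1.0 / x

   and a constant base is evaluated outright via real_powi.  */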
8432 static tree
8433 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8434 tree arg0, tree arg1, tree type)
8435 {
8436 if (!validate_arg (arg0, REAL_TYPE)
8437 || !validate_arg (arg1, INTEGER_TYPE))
8438 return NULL_TREE;
8439
8440   /* Optimize powi(1.0,y) = 1.0.  */
8441 if (real_onep (arg0))
8442 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8443
8444 if (tree_fits_shwi_p (arg1))
8445 {
8446 HOST_WIDE_INT c = tree_to_shwi (arg1);
8447
8448 /* Evaluate powi at compile-time. */
8449 if (TREE_CODE (arg0) == REAL_CST
8450 && !TREE_OVERFLOW (arg0))
8451 {
8452 REAL_VALUE_TYPE x;
8453 x = TREE_REAL_CST (arg0);
8454 real_powi (&x, TYPE_MODE (type), &x, c);
8455 return build_real (type, x);
8456 }
8457
8458       /* Optimize powi(x,0) = 1.0.  */
8459 if (c == 0)
8460 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8461 arg0);
8462
8463       /* Optimize powi(x,1) = x.  */
8464 if (c == 1)
8465 return arg0;
8466
8467       /* Optimize powi(x,-1) = 1.0/x.  */
8468 if (c == -1)
8469 return fold_build2_loc (loc, RDIV_EXPR, type,
8470 build_real (type, dconst1), arg0);
8471 }
8472
8473 return NULL_TREE;
8474 }
8475
8476 /* A subroutine of fold_builtin to fold the various exponent
8477 functions. Return NULL_TREE if no simplification can be made.
8478 FUNC is the corresponding MPFR exponent function. */
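/* The unsafe-math inverse of the logarithm folding above:
   exp (log (x)) -> x, exp2 (log2 (x)) -> x, exp10 (log10 (x)) -> x.  */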
8479
8480 static tree
8481 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8482 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8483 {
8484 if (validate_arg (arg, REAL_TYPE))
8485 {
8486 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8487 tree res;
8488
8489 /* Calculate the result when the argument is a constant. */
8490 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8491 return res;
8492
8493 /* Optimize expN(logN(x)) = x. */
8494 if (flag_unsafe_math_optimizations)
8495 {
8496 const enum built_in_function fcode = builtin_mathfn_code (arg);
8497
8498 if ((func == mpfr_exp
8499 && (fcode == BUILT_IN_LOG
8500 || fcode == BUILT_IN_LOGF
8501 || fcode == BUILT_IN_LOGL))
8502 || (func == mpfr_exp2
8503 && (fcode == BUILT_IN_LOG2
8504 || fcode == BUILT_IN_LOG2F
8505 || fcode == BUILT_IN_LOG2L))
8506 || (func == mpfr_exp10
8507 && (fcode == BUILT_IN_LOG10
8508 || fcode == BUILT_IN_LOG10F
8509 || fcode == BUILT_IN_LOG10L)))
8510 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8511 }
8512 }
8513
8514 return NULL_TREE;
8515 }
8516
8517 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8518 Return NULL_TREE if no simplification can be made. */
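/* With a constant source string, stpcpy is expressed via memcpy, e.g.

     stpcpy (d, "abc")

   becomes, roughly, (memcpy (d, "abc", 4), d + 3): copy strlen + 1
   bytes, then return a pointer to the copied terminating NUL.  */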
8519
8520 static tree
8521 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8522 {
8523 tree fn, len, lenp1, call, type;
8524
8525 if (!validate_arg (dest, POINTER_TYPE)
8526 || !validate_arg (src, POINTER_TYPE))
8527 return NULL_TREE;
8528
8529 len = c_strlen (src, 1);
8530 if (!len
8531 || TREE_CODE (len) != INTEGER_CST)
8532 return NULL_TREE;
8533
8534 if (optimize_function_for_size_p (cfun)
8535 /* If length is zero it's small enough. */
8536 && !integer_zerop (len))
8537 return NULL_TREE;
8538
8539 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8540 if (!fn)
8541 return NULL_TREE;
8542
8543 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8544 fold_convert_loc (loc, size_type_node, len),
8545 build_int_cst (size_type_node, 1));
8546 /* We use dest twice in building our expression. Save it from
8547 multiple expansions. */
8548 dest = builtin_save_expr (dest);
8549 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8550
8551 type = TREE_TYPE (TREE_TYPE (fndecl));
8552 dest = fold_build_pointer_plus_loc (loc, dest, len);
8553 dest = fold_convert_loc (loc, type, dest);
8554 dest = omit_one_operand_loc (loc, type, dest, call);
8555 return dest;
8556 }
8557
8558 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8559 arguments to the call, and TYPE is its return type.
8560 Return NULL_TREE if no simplification can be made. */
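/* E.g. with everything constant, memchr ("hello", 'l', 5) folds to a
   pointer to offset 2 of the string, and memchr ("hello", 'z', 5)
   folds to a null pointer.  */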
8561
8562 static tree
8563 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8564 {
8565 if (!validate_arg (arg1, POINTER_TYPE)
8566 || !validate_arg (arg2, INTEGER_TYPE)
8567 || !validate_arg (len, INTEGER_TYPE))
8568 return NULL_TREE;
8569 else
8570 {
8571 const char *p1;
8572
8573 if (TREE_CODE (arg2) != INTEGER_CST
8574 || !tree_fits_uhwi_p (len))
8575 return NULL_TREE;
8576
8577 p1 = c_getstr (arg1);
8578 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8579 {
8580 char c;
8581 const char *r;
8582 tree tem;
8583
8584 if (target_char_cast (arg2, &c))
8585 return NULL_TREE;
8586
8587 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8588
8589 if (r == NULL)
8590 return build_int_cst (TREE_TYPE (arg1), 0);
8591
8592 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8593 return fold_convert_loc (loc, type, tem);
8594 }
8595 return NULL_TREE;
8596 }
8597 }
8598
8599 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8600 Return NULL_TREE if no simplification can be made. */
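/* For example:

     memcmp (p, p, n)       -> 0    (identical, side-effect-free args)
     memcmp ("ab", "ac", 2) -> -1
     memcmp (p, q, 1)       -> *(const unsigned char *) p
                               - *(const unsigned char *) q  */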
8601
8602 static tree
8603 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8604 {
8605 const char *p1, *p2;
8606
8607 if (!validate_arg (arg1, POINTER_TYPE)
8608 || !validate_arg (arg2, POINTER_TYPE)
8609 || !validate_arg (len, INTEGER_TYPE))
8610 return NULL_TREE;
8611
8612 /* If the LEN parameter is zero, return zero. */
8613 if (integer_zerop (len))
8614 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8615 arg1, arg2);
8616
8617 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8618 if (operand_equal_p (arg1, arg2, 0))
8619 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8620
8621 p1 = c_getstr (arg1);
8622 p2 = c_getstr (arg2);
8623
8624 /* If all arguments are constant, and the value of len is not greater
8625 than the lengths of arg1 and arg2, evaluate at compile-time. */
8626 if (tree_fits_uhwi_p (len) && p1 && p2
8627 && compare_tree_int (len, strlen (p1) + 1) <= 0
8628 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8629 {
8630 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8631
8632 if (r > 0)
8633 return integer_one_node;
8634 else if (r < 0)
8635 return integer_minus_one_node;
8636 else
8637 return integer_zero_node;
8638 }
8639
8640 /* If len parameter is one, return an expression corresponding to
8641      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8642 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8643 {
8644 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8645 tree cst_uchar_ptr_node
8646 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8647
8648 tree ind1
8649 = fold_convert_loc (loc, integer_type_node,
8650 build1 (INDIRECT_REF, cst_uchar_node,
8651 fold_convert_loc (loc,
8652 cst_uchar_ptr_node,
8653 arg1)));
8654 tree ind2
8655 = fold_convert_loc (loc, integer_type_node,
8656 build1 (INDIRECT_REF, cst_uchar_node,
8657 fold_convert_loc (loc,
8658 cst_uchar_ptr_node,
8659 arg2)));
8660 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8661 }
8662
8663 return NULL_TREE;
8664 }
8665
8666 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8667 Return NULL_TREE if no simplification can be made. */
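/* For example:

     strcmp (s, s)     -> 0
     strcmp ("a", "b") -> -1
     strcmp (s, "")    -> *(const unsigned char *) s
     strcmp ("", s)    -> -*(const unsigned char *) s  */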
8668
8669 static tree
8670 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8671 {
8672 const char *p1, *p2;
8673
8674 if (!validate_arg (arg1, POINTER_TYPE)
8675 || !validate_arg (arg2, POINTER_TYPE))
8676 return NULL_TREE;
8677
8678 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8679 if (operand_equal_p (arg1, arg2, 0))
8680 return integer_zero_node;
8681
8682 p1 = c_getstr (arg1);
8683 p2 = c_getstr (arg2);
8684
8685 if (p1 && p2)
8686 {
8687 const int i = strcmp (p1, p2);
8688 if (i < 0)
8689 return integer_minus_one_node;
8690 else if (i > 0)
8691 return integer_one_node;
8692 else
8693 return integer_zero_node;
8694 }
8695
8696 /* If the second arg is "", return *(const unsigned char*)arg1. */
8697 if (p2 && *p2 == '\0')
8698 {
8699 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8700 tree cst_uchar_ptr_node
8701 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8702
8703 return fold_convert_loc (loc, integer_type_node,
8704 build1 (INDIRECT_REF, cst_uchar_node,
8705 fold_convert_loc (loc,
8706 cst_uchar_ptr_node,
8707 arg1)));
8708 }
8709
8710 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8711 if (p1 && *p1 == '\0')
8712 {
8713 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8714 tree cst_uchar_ptr_node
8715 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8716
8717 tree temp
8718 = fold_convert_loc (loc, integer_type_node,
8719 build1 (INDIRECT_REF, cst_uchar_node,
8720 fold_convert_loc (loc,
8721 cst_uchar_ptr_node,
8722 arg2)));
8723 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8724 }
8725
8726 return NULL_TREE;
8727 }
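
/* For example (illustrative): strcmp (s, s) folds to 0,
   strcmp ("ab", "ac") folds to -1, and strcmp (s, "") becomes the
   single byte load *(const unsigned char *) s.  */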
8728
8729 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8730 Return NULL_TREE if no simplification can be made. */
8731
8732 static tree
8733 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8734 {
8735 const char *p1, *p2;
8736
8737 if (!validate_arg (arg1, POINTER_TYPE)
8738 || !validate_arg (arg2, POINTER_TYPE)
8739 || !validate_arg (len, INTEGER_TYPE))
8740 return NULL_TREE;
8741
8742 /* If the LEN parameter is zero, return zero. */
8743 if (integer_zerop (len))
8744 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8745 arg1, arg2);
8746
8747 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8748 if (operand_equal_p (arg1, arg2, 0))
8749 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8750
8751 p1 = c_getstr (arg1);
8752 p2 = c_getstr (arg2);
8753
8754 if (tree_fits_uhwi_p (len) && p1 && p2)
8755 {
8756 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8757 if (i > 0)
8758 return integer_one_node;
8759 else if (i < 0)
8760 return integer_minus_one_node;
8761 else
8762 return integer_zero_node;
8763 }
8764
8765 /* If the second arg is "", and the length is greater than zero,
8766 return *(const unsigned char*)arg1. */
8767 if (p2 && *p2 == '\0'
8768 && TREE_CODE (len) == INTEGER_CST
8769 && tree_int_cst_sgn (len) == 1)
8770 {
8771 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8772 tree cst_uchar_ptr_node
8773 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8774
8775 return fold_convert_loc (loc, integer_type_node,
8776 build1 (INDIRECT_REF, cst_uchar_node,
8777 fold_convert_loc (loc,
8778 cst_uchar_ptr_node,
8779 arg1)));
8780 }
8781
8782 /* If the first arg is "", and the length is greater than zero,
8783 return -*(const unsigned char*)arg2. */
8784 if (p1 && *p1 == '\0'
8785 && TREE_CODE (len) == INTEGER_CST
8786 && tree_int_cst_sgn (len) == 1)
8787 {
8788 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8789 tree cst_uchar_ptr_node
8790 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8791
8792 tree temp = fold_convert_loc (loc, integer_type_node,
8793 build1 (INDIRECT_REF, cst_uchar_node,
8794 fold_convert_loc (loc,
8795 cst_uchar_ptr_node,
8796 arg2)));
8797 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8798 }
8799
8800 /* If the LEN parameter is one, return an expression corresponding to
8801 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8802 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8803 {
8804 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8805 tree cst_uchar_ptr_node
8806 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8807
8808 tree ind1 = fold_convert_loc (loc, integer_type_node,
8809 build1 (INDIRECT_REF, cst_uchar_node,
8810 fold_convert_loc (loc,
8811 cst_uchar_ptr_node,
8812 arg1)));
8813 tree ind2 = fold_convert_loc (loc, integer_type_node,
8814 build1 (INDIRECT_REF, cst_uchar_node,
8815 fold_convert_loc (loc,
8816 cst_uchar_ptr_node,
8817 arg2)));
8818 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8819 }
8820
8821 return NULL_TREE;
8822 }
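
/* For example (illustrative): strncmp (s, t, 0) folds to 0 while
   still evaluating S and T for side effects, and strncmp (s, "", n)
   with a constant n > 0 becomes *(const unsigned char *) s.  */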
8823
8824 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8825 ARG. Return NULL_TREE if no simplification can be made. */
8826
8827 static tree
8828 fold_builtin_signbit (location_t loc, tree arg, tree type)
8829 {
8830 if (!validate_arg (arg, REAL_TYPE))
8831 return NULL_TREE;
8832
8833 /* If ARG is a compile-time constant, determine the result. */
8834 if (TREE_CODE (arg) == REAL_CST
8835 && !TREE_OVERFLOW (arg))
8836 {
8837 REAL_VALUE_TYPE c;
8838
8839 c = TREE_REAL_CST (arg);
8840 return (REAL_VALUE_NEGATIVE (c)
8841 ? build_one_cst (type)
8842 : build_zero_cst (type));
8843 }
8844
8845 /* If ARG is non-negative, the result is always zero. */
8846 if (tree_expr_nonnegative_p (arg))
8847 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8848
8849 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8850 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8851 return fold_convert (type,
8852 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8853 build_real (TREE_TYPE (arg), dconst0)));
8854
8855 return NULL_TREE;
8856 }
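
/* For example (illustrative): signbit (-2.5) folds to 1 at compile
   time, and when the format of ARG has no signed zeros signbit (x)
   becomes the comparison x < 0.0.  */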
8857
8858 /* Fold function call to builtin copysign, copysignf or copysignl with
8859 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8860 be made. */
8861
8862 static tree
8863 fold_builtin_copysign (location_t loc, tree fndecl,
8864 tree arg1, tree arg2, tree type)
8865 {
8866 tree tem;
8867
8868 if (!validate_arg (arg1, REAL_TYPE)
8869 || !validate_arg (arg2, REAL_TYPE))
8870 return NULL_TREE;
8871
8872 /* copysign(X,X) is X. */
8873 if (operand_equal_p (arg1, arg2, 0))
8874 return fold_convert_loc (loc, type, arg1);
8875
8876 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8877 if (TREE_CODE (arg1) == REAL_CST
8878 && TREE_CODE (arg2) == REAL_CST
8879 && !TREE_OVERFLOW (arg1)
8880 && !TREE_OVERFLOW (arg2))
8881 {
8882 REAL_VALUE_TYPE c1, c2;
8883
8884 c1 = TREE_REAL_CST (arg1);
8885 c2 = TREE_REAL_CST (arg2);
8886 /* c1.sign := c2.sign. */
8887 real_copysign (&c1, &c2);
8888 return build_real (type, c1);
8889 }
8890
8891 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8892 Remember to evaluate Y for side-effects. */
8893 if (tree_expr_nonnegative_p (arg2))
8894 return omit_one_operand_loc (loc, type,
8895 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8896 arg2);
8897
8898 /* Strip sign changing operations for the first argument. */
8899 tem = fold_strip_sign_ops (arg1);
8900 if (tem)
8901 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8902
8903 return NULL_TREE;
8904 }
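
/* For example (illustrative): copysign (x, x) folds to x,
   copysign (-3.0, 2.0) folds to 3.0, and since fabs (y) is known to
   be non-negative, copysign (x, fabs (y)) becomes fabs (x) with Y
   still evaluated for its side effects.  */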
8905
8906 /* Fold a call to builtin isascii with argument ARG. */
8907
8908 static tree
8909 fold_builtin_isascii (location_t loc, tree arg)
8910 {
8911 if (!validate_arg (arg, INTEGER_TYPE))
8912 return NULL_TREE;
8913 else
8914 {
8915 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8916 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8917 build_int_cst (integer_type_node,
8918 ~ (unsigned HOST_WIDE_INT) 0x7f));
8919 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8920 arg, integer_zero_node);
8921 }
8922 }
8923
8924 /* Fold a call to builtin toascii with argument ARG. */
8925
8926 static tree
8927 fold_builtin_toascii (location_t loc, tree arg)
8928 {
8929 if (!validate_arg (arg, INTEGER_TYPE))
8930 return NULL_TREE;
8931
8932 /* Transform toascii(c) -> (c & 0x7f). */
8933 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8934 build_int_cst (integer_type_node, 0x7f));
8935 }
8936
8937 /* Fold a call to builtin isdigit with argument ARG. */
8938
8939 static tree
8940 fold_builtin_isdigit (location_t loc, tree arg)
8941 {
8942 if (!validate_arg (arg, INTEGER_TYPE))
8943 return NULL_TREE;
8944 else
8945 {
8946 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8947 /* According to the C standard, isdigit is unaffected by locale.
8948 However, it definitely is affected by the target character set. */
8949 unsigned HOST_WIDE_INT target_digit0
8950 = lang_hooks.to_target_charset ('0');
8951
8952 if (target_digit0 == 0)
8953 return NULL_TREE;
8954
8955 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8956 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8957 build_int_cst (unsigned_type_node, target_digit0));
8958 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8959 build_int_cst (unsigned_type_node, 9));
8960 }
8961 }
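
/* For example (an illustrative sketch, assuming a target character
   set where '0' maps to 48): isdigit (c) becomes
   (unsigned) c - 48 <= 9, so isdigit ('7') reduces to 1 after
   further constant folding.  */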
8962
8963 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8964
8965 static tree
8966 fold_builtin_fabs (location_t loc, tree arg, tree type)
8967 {
8968 if (!validate_arg (arg, REAL_TYPE))
8969 return NULL_TREE;
8970
8971 arg = fold_convert_loc (loc, type, arg);
8972 if (TREE_CODE (arg) == REAL_CST)
8973 return fold_abs_const (arg, type);
8974 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8975 }
8976
8977 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8978
8979 static tree
8980 fold_builtin_abs (location_t loc, tree arg, tree type)
8981 {
8982 if (!validate_arg (arg, INTEGER_TYPE))
8983 return NULL_TREE;
8984
8985 arg = fold_convert_loc (loc, type, arg);
8986 if (TREE_CODE (arg) == INTEGER_CST)
8987 return fold_abs_const (arg, type);
8988 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8989 }
8990
8991 /* Fold a fma operation with arguments ARG[012]. */
8992
8993 tree
8994 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8995 tree type, tree arg0, tree arg1, tree arg2)
8996 {
8997 if (TREE_CODE (arg0) == REAL_CST
8998 && TREE_CODE (arg1) == REAL_CST
8999 && TREE_CODE (arg2) == REAL_CST)
9000 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9001
9002 return NULL_TREE;
9003 }
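
/* For example (illustrative): with all-constant operands,
   fma (2.0, 3.0, 4.0) is evaluated by MPFR in a single rounding and
   folds to 10.0.  */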
9004
9005 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9006
9007 static tree
9008 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9009 {
9010 if (validate_arg (arg0, REAL_TYPE)
9011 && validate_arg (arg1, REAL_TYPE)
9012 && validate_arg (arg2, REAL_TYPE))
9013 {
9014 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9015 if (tem)
9016 return tem;
9017
9018 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9019 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9020 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9021 }
9022 return NULL_TREE;
9023 }
9024
9025 /* Fold a call to builtin fmin or fmax. */
9026
9027 static tree
9028 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9029 tree type, bool max)
9030 {
9031 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9032 {
9033 /* Calculate the result when the argument is a constant. */
9034 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9035
9036 if (res)
9037 return res;
9038
9039 /* If either argument is NaN, return the other one. Avoid the
9040 transformation if we get (and honor) a signalling NaN. Using
9041 omit_one_operand() ensures we create a non-lvalue. */
9042 if (TREE_CODE (arg0) == REAL_CST
9043 && real_isnan (&TREE_REAL_CST (arg0))
9044 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9045 || ! TREE_REAL_CST (arg0).signalling))
9046 return omit_one_operand_loc (loc, type, arg1, arg0);
9047 if (TREE_CODE (arg1) == REAL_CST
9048 && real_isnan (&TREE_REAL_CST (arg1))
9049 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9050 || ! TREE_REAL_CST (arg1).signalling))
9051 return omit_one_operand_loc (loc, type, arg0, arg1);
9052
9053 /* Transform fmin/fmax(x,x) -> x. */
9054 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9055 return omit_one_operand_loc (loc, type, arg0, arg1);
9056
9057 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9058 functions to return the numeric arg if the other one is NaN.
9059 These tree codes don't honor that, so only transform if
9060 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9061 handled, so we don't have to worry about it either. */
9062 if (flag_finite_math_only)
9063 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9064 fold_convert_loc (loc, type, arg0),
9065 fold_convert_loc (loc, type, arg1));
9066 }
9067 return NULL_TREE;
9068 }
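
/* For example (illustrative): fmin (x, __builtin_nan ("")) folds to
   x because a quiet NaN selects the other operand, fmax (x, x) folds
   to x, and under -ffinite-math-only fmax (x, y) becomes
   MAX_EXPR <x, y>.  */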
9069
9070 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9071
9072 static tree
9073 fold_builtin_carg (location_t loc, tree arg, tree type)
9074 {
9075 if (validate_arg (arg, COMPLEX_TYPE)
9076 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9077 {
9078 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9079
9080 if (atan2_fn)
9081 {
9082 tree new_arg = builtin_save_expr (arg);
9083 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9084 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9085 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9086 }
9087 }
9088
9089 return NULL_TREE;
9090 }
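
/* For example (illustrative): for a complex double Z, carg (z)
   becomes atan2 (__imag__ z, __real__ z), with Z saved so that it is
   evaluated only once.  */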
9091
9092 /* Fold a call to builtin logb/ilogb. */
9093
9094 static tree
9095 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9096 {
9097 if (! validate_arg (arg, REAL_TYPE))
9098 return NULL_TREE;
9099
9100 STRIP_NOPS (arg);
9101
9102 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9103 {
9104 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9105
9106 switch (value->cl)
9107 {
9108 case rvc_nan:
9109 case rvc_inf:
9110 /* If arg is Inf or NaN and we're logb, return it. */
9111 if (TREE_CODE (rettype) == REAL_TYPE)
9112 {
9113 /* For logb(-Inf) we have to return +Inf. */
9114 if (real_isinf (value) && real_isneg (value))
9115 {
9116 REAL_VALUE_TYPE tem;
9117 real_inf (&tem);
9118 return build_real (rettype, tem);
9119 }
9120 return fold_convert_loc (loc, rettype, arg);
9121 }
9122 /* Fall through... */
9123 case rvc_zero:
9124 /* Zero may set errno and/or raise an exception for logb; for
9125 ilogb we don't know the value of FP_ILOGB0.  */
9126 return NULL_TREE;
9127 case rvc_normal:
9128 /* For normal numbers, proceed iff radix == 2. In GCC,
9129 normalized significands are in the range [0.5, 1.0). We
9130 want the exponent as if they were [1.0, 2.0) so get the
9131 exponent and subtract 1. */
9132 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9133 return fold_convert_loc (loc, rettype,
9134 build_int_cst (integer_type_node,
9135 REAL_EXP (value)-1));
9136 break;
9137 }
9138 }
9139
9140 return NULL_TREE;
9141 }
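
/* For example (illustrative): in a binary format 8.0 is represented
   as 0.5 * 2**4, so REAL_EXP is 4 and logb (8.0) folds to 3.0;
   logb (-__builtin_inf ()) folds to +Inf.  */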
9142
9143 /* Fold a call to builtin significand, if radix == 2. */
9144
9145 static tree
9146 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9147 {
9148 if (! validate_arg (arg, REAL_TYPE))
9149 return NULL_TREE;
9150
9151 STRIP_NOPS (arg);
9152
9153 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9154 {
9155 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9156
9157 switch (value->cl)
9158 {
9159 case rvc_zero:
9160 case rvc_nan:
9161 case rvc_inf:
9162 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9163 return fold_convert_loc (loc, rettype, arg);
9164 case rvc_normal:
9165 /* For normal numbers, proceed iff radix == 2. */
9166 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9167 {
9168 REAL_VALUE_TYPE result = *value;
9169 /* In GCC, normalized significands are in the range [0.5,
9170 1.0). We want them to be [1.0, 2.0) so set the
9171 exponent to 1. */
9172 SET_REAL_EXP (&result, 1);
9173 return build_real (rettype, result);
9174 }
9175 break;
9176 }
9177 }
9178
9179 return NULL_TREE;
9180 }
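
/* For example (illustrative): 8.0 is represented as 0.5 * 2**4, so
   forcing the exponent to 1 makes significand (8.0) fold to 1.0.  */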
9181
9182 /* Fold a call to builtin frexp.  We can assume the base is 2.  */
9183
9184 static tree
9185 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9186 {
9187 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9188 return NULL_TREE;
9189
9190 STRIP_NOPS (arg0);
9191
9192 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9193 return NULL_TREE;
9194
9195 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9196
9197 /* Proceed if a valid pointer type was passed in. */
9198 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9199 {
9200 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9201 tree frac, exp;
9202
9203 switch (value->cl)
9204 {
9205 case rvc_zero:
9206 /* For +-0, return (*exp = 0, +-0). */
9207 exp = integer_zero_node;
9208 frac = arg0;
9209 break;
9210 case rvc_nan:
9211 case rvc_inf:
9212 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9213 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9214 case rvc_normal:
9215 {
9216 /* Since the frexp function always expects base 2, and in
9217 GCC normalized significands are already in the range
9218 [0.5, 1.0), we have exactly what frexp wants. */
9219 REAL_VALUE_TYPE frac_rvt = *value;
9220 SET_REAL_EXP (&frac_rvt, 0);
9221 frac = build_real (rettype, frac_rvt);
9222 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9223 }
9224 break;
9225 default:
9226 gcc_unreachable ();
9227 }
9228
9229 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9230 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9231 TREE_SIDE_EFFECTS (arg1) = 1;
9232 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9233 }
9234
9235 return NULL_TREE;
9236 }
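
/* For example (illustrative): frexp (8.0, &e) folds to the compound
   expression (*e = 4, 0.5), since GCC's normalized significands
   already lie in [0.5, 1.0) as frexp requires.  */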
9237
9238 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9239 then we can assume the base is two. If it's false, then we have to
9240 check the mode of the TYPE parameter in certain cases. */
9241
9242 static tree
9243 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9244 tree type, bool ldexp)
9245 {
9246 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9247 {
9248 STRIP_NOPS (arg0);
9249 STRIP_NOPS (arg1);
9250
9251 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9252 if (real_zerop (arg0) || integer_zerop (arg1)
9253 || (TREE_CODE (arg0) == REAL_CST
9254 && !real_isfinite (&TREE_REAL_CST (arg0))))
9255 return omit_one_operand_loc (loc, type, arg0, arg1);
9256
9257 /* If both arguments are constant, then try to evaluate it. */
9258 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9259 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9260 && tree_fits_shwi_p (arg1))
9261 {
9262 /* Bound the maximum adjustment to twice the range of the
9263 mode's valid exponents. Use abs to ensure the range is
9264 positive as a sanity check. */
9265 const long max_exp_adj
9266 = 2 * labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9267 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9268
9269 /* Get the user-requested adjustment. */
9270 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9271
9272 /* The requested adjustment must be inside this range. This
9273 is a preliminary cap to avoid things like overflow, we
9274 may still fail to compute the result for other reasons. */
9275 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9276 {
9277 REAL_VALUE_TYPE initial_result;
9278
9279 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9280
9281 /* Ensure we didn't overflow. */
9282 if (! real_isinf (&initial_result))
9283 {
9284 const REAL_VALUE_TYPE trunc_result
9285 = real_value_truncate (TYPE_MODE (type), initial_result);
9286
9287 /* Only proceed if the target mode can hold the
9288 resulting value. */
9289 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9290 return build_real (type, trunc_result);
9291 }
9292 }
9293 }
9294 }
9295
9296 return NULL_TREE;
9297 }
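
/* For example (illustrative): ldexp (x, 0) folds to x,
   ldexp (0.75, 4) folds to 12.0, and an exponent adjustment outside
   the preliminary cap is left for the library call to handle.  */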
9298
9299 /* Fold a call to builtin modf. */
9300
9301 static tree
9302 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9303 {
9304 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9305 return NULL_TREE;
9306
9307 STRIP_NOPS (arg0);
9308
9309 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9310 return NULL_TREE;
9311
9312 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9313
9314 /* Proceed if a valid pointer type was passed in. */
9315 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9316 {
9317 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9318 REAL_VALUE_TYPE trunc, frac;
9319
9320 switch (value->cl)
9321 {
9322 case rvc_nan:
9323 case rvc_zero:
9324 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9325 trunc = frac = *value;
9326 break;
9327 case rvc_inf:
9328 /* For +-Inf, return (*arg1 = arg0, +-0). */
9329 frac = dconst0;
9330 frac.sign = value->sign;
9331 trunc = *value;
9332 break;
9333 case rvc_normal:
9334 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9335 real_trunc (&trunc, VOIDmode, value);
9336 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9337 /* If the original number was negative and already
9338 integral, then the fractional part is -0.0. */
9339 if (value->sign && frac.cl == rvc_zero)
9340 frac.sign = value->sign;
9341 break;
9342 }
9343
9344 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9345 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9346 build_real (rettype, trunc));
9347 TREE_SIDE_EFFECTS (arg1) = 1;
9348 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9349 build_real (rettype, frac));
9350 }
9351
9352 return NULL_TREE;
9353 }
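
/* For example (illustrative): modf (2.5, &ip) folds to the compound
   expression (*ip = 2.0, 0.5), and modf (-2.0, &ip) folds to
   (*ip = -2.0, -0.0), preserving the sign on the zero fractional
   part.  */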
9354
9355 /* Given a location LOC, an interclass builtin function decl FNDECL
9356 and its single argument ARG, return a folded expression computing
9357 the same, or NULL_TREE if we either couldn't or didn't want to fold
9358 (the latter happens if there's an RTL instruction available). */
9359
9360 static tree
9361 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9362 {
9363 machine_mode mode;
9364
9365 if (!validate_arg (arg, REAL_TYPE))
9366 return NULL_TREE;
9367
9368 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9369 return NULL_TREE;
9370
9371 mode = TYPE_MODE (TREE_TYPE (arg));
9372
9373 /* If there is no optab, try generic code. */
9374 switch (DECL_FUNCTION_CODE (fndecl))
9375 {
9376 tree result;
9377
9378 CASE_FLT_FN (BUILT_IN_ISINF):
9379 {
9380 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9381 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9382 tree const type = TREE_TYPE (arg);
9383 REAL_VALUE_TYPE r;
9384 char buf[128];
9385
9386 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9387 real_from_string (&r, buf);
9388 result = build_call_expr (isgr_fn, 2,
9389 fold_build1_loc (loc, ABS_EXPR, type, arg),
9390 build_real (type, r));
9391 return result;
9392 }
9393 CASE_FLT_FN (BUILT_IN_FINITE):
9394 case BUILT_IN_ISFINITE:
9395 {
9396 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9397 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9398 tree const type = TREE_TYPE (arg);
9399 REAL_VALUE_TYPE r;
9400 char buf[128];
9401
9402 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9403 real_from_string (&r, buf);
9404 result = build_call_expr (isle_fn, 2,
9405 fold_build1_loc (loc, ABS_EXPR, type, arg),
9406 build_real (type, r));
9414 return result;
9415 }
9416 case BUILT_IN_ISNORMAL:
9417 {
9418 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9419 islessequal(fabs(x),DBL_MAX). */
9420 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9421 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9422 tree const type = TREE_TYPE (arg);
9423 REAL_VALUE_TYPE rmax, rmin;
9424 char buf[128];
9425
9426 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9427 real_from_string (&rmax, buf);
9428 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9429 real_from_string (&rmin, buf);
9430 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9431 result = build_call_expr (isle_fn, 2, arg,
9432 build_real (type, rmax));
9433 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9434 build_call_expr (isge_fn, 2, arg,
9435 build_real (type, rmin)));
9436 return result;
9437 }
9438 default:
9439 break;
9440 }
9441
9442 return NULL_TREE;
9443 }
9444
9445 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9446 ARG is the argument for the call. */
9447
9448 static tree
9449 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9450 {
9451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9452 REAL_VALUE_TYPE r;
9453
9454 if (!validate_arg (arg, REAL_TYPE))
9455 return NULL_TREE;
9456
9457 switch (builtin_index)
9458 {
9459 case BUILT_IN_ISINF:
9460 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9461 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9462
9463 if (TREE_CODE (arg) == REAL_CST)
9464 {
9465 r = TREE_REAL_CST (arg);
9466 if (real_isinf (&r))
9467 return real_compare (GT_EXPR, &r, &dconst0)
9468 ? integer_one_node : integer_minus_one_node;
9469 else
9470 return integer_zero_node;
9471 }
9472
9473 return NULL_TREE;
9474
9475 case BUILT_IN_ISINF_SIGN:
9476 {
9477 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9478 /* In a boolean context, GCC will fold the inner COND_EXPR to
9479 1. So e.g. "if (isinf_sign(x))" would be folded to just
9480 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9481 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9482 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9483 tree tmp = NULL_TREE;
9484
9485 arg = builtin_save_expr (arg);
9486
9487 if (signbit_fn && isinf_fn)
9488 {
9489 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9490 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9491
9492 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9493 signbit_call, integer_zero_node);
9494 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9495 isinf_call, integer_zero_node);
9496
9497 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9498 integer_minus_one_node, integer_one_node);
9499 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9500 isinf_call, tmp,
9501 integer_zero_node);
9502 }
9503
9504 return tmp;
9505 }
9506
9507 case BUILT_IN_ISFINITE:
9508 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9509 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9510 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9511
9512 if (TREE_CODE (arg) == REAL_CST)
9513 {
9514 r = TREE_REAL_CST (arg);
9515 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9516 }
9517
9518 return NULL_TREE;
9519
9520 case BUILT_IN_ISNAN:
9521 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9522 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9523
9524 if (TREE_CODE (arg) == REAL_CST)
9525 {
9526 r = TREE_REAL_CST (arg);
9527 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9528 }
9529
9530 arg = builtin_save_expr (arg);
9531 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9532
9533 default:
9534 gcc_unreachable ();
9535 }
9536 }
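
/* For example (illustrative): isinf (-__builtin_inf ()) folds to -1,
   preserving the sign, isfinite (x) folds to 1 outright when the
   mode honors neither NaNs nor infinities, and a non-constant
   isnan (x) becomes the self-comparison x UNORDERED x.  */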
9537
9538 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9539 This builtin will generate code to return the appropriate floating
9540 point classification depending on the value of the floating point
9541 number passed in. The possible return values must be supplied as
9542 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9543 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9544 one floating point argument which is "type generic". */
9545
9546 static tree
9547 fold_builtin_fpclassify (location_t loc, tree exp)
9548 {
9549 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9550 arg, type, res, tmp;
9551 machine_mode mode;
9552 REAL_VALUE_TYPE r;
9553 char buf[128];
9554
9555 /* Verify the required arguments in the original call. */
9556 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9557 INTEGER_TYPE, INTEGER_TYPE,
9558 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9559 return NULL_TREE;
9560
9561 fp_nan = CALL_EXPR_ARG (exp, 0);
9562 fp_infinite = CALL_EXPR_ARG (exp, 1);
9563 fp_normal = CALL_EXPR_ARG (exp, 2);
9564 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9565 fp_zero = CALL_EXPR_ARG (exp, 4);
9566 arg = CALL_EXPR_ARG (exp, 5);
9567 type = TREE_TYPE (arg);
9568 mode = TYPE_MODE (type);
9569 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9570
9571 /* fpclassify(x) ->
9572 isnan(x) ? FP_NAN :
9573 (fabs(x) == Inf ? FP_INFINITE :
9574 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9575 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9576
9577 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9578 build_real (type, dconst0));
9579 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9580 tmp, fp_zero, fp_subnormal);
9581
9582 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9583 real_from_string (&r, buf);
9584 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9585 arg, build_real (type, r));
9586 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9587
9588 if (HONOR_INFINITIES (mode))
9589 {
9590 real_inf (&r);
9591 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9592 build_real (type, r));
9593 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9594 fp_infinite, res);
9595 }
9596
9597 if (HONOR_NANS (mode))
9598 {
9599 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9600 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9601 }
9602
9603 return res;
9604 }
9605
9606 /* Fold a call to an unordered comparison function such as
9607 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9608 being called and ARG0 and ARG1 are the arguments for the call.
9609 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9610 the opposite of the desired result. UNORDERED_CODE is used
9611 for modes that can hold NaNs and ORDERED_CODE is used for
9612 the rest. */
9613
9614 static tree
9615 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9616 enum tree_code unordered_code,
9617 enum tree_code ordered_code)
9618 {
9619 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9620 enum tree_code code;
9621 tree type0, type1;
9622 enum tree_code code0, code1;
9623 tree cmp_type = NULL_TREE;
9624
9625 type0 = TREE_TYPE (arg0);
9626 type1 = TREE_TYPE (arg1);
9627
9628 code0 = TREE_CODE (type0);
9629 code1 = TREE_CODE (type1);
9630
9631 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9632 /* Choose the wider of two real types. */
9633 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9634 ? type0 : type1;
9635 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9636 cmp_type = type0;
9637 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9638 cmp_type = type1;
9639
9640 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9641 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9642
9643 if (unordered_code == UNORDERED_EXPR)
9644 {
9645 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9646 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9647 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9648 }
9649
9650 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9651 : ordered_code;
9652 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9653 fold_build2_loc (loc, code, type, arg0, arg1));
9654 }
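
/* For example (illustrative): isgreater (x, y) becomes !(x UNLE y)
   when the mode can hold NaNs and !(x <= y) otherwise, so that,
   unlike a plain x > y, no exception is raised for unordered
   operands.  */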
9655
9656 /* Fold a call to built-in function FNDECL with 0 arguments.
9657 IGNORE is true if the result of the function call is ignored. This
9658 function returns NULL_TREE if no simplification was possible. */
9659
9660 static tree
9661 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9662 {
9663 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9664 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9665 switch (fcode)
9666 {
9667 CASE_FLT_FN (BUILT_IN_INF):
9668 case BUILT_IN_INFD32:
9669 case BUILT_IN_INFD64:
9670 case BUILT_IN_INFD128:
9671 return fold_builtin_inf (loc, type, true);
9672
9673 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9674 return fold_builtin_inf (loc, type, false);
9675
9676 case BUILT_IN_CLASSIFY_TYPE:
9677 return fold_builtin_classify_type (NULL_TREE);
9678
9679 case BUILT_IN_UNREACHABLE:
9680 if (flag_sanitize & SANITIZE_UNREACHABLE
9681 && (current_function_decl == NULL
9682 || !lookup_attribute ("no_sanitize_undefined",
9683 DECL_ATTRIBUTES (current_function_decl))))
9684 return ubsan_instrument_unreachable (loc);
9685 break;
9686
9687 default:
9688 break;
9689 }
9690 return NULL_TREE;
9691 }
9692
9693 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9694 IGNORE is true if the result of the function call is ignored. This
9695 function returns NULL_TREE if no simplification was possible. */
9696
9697 static tree
9698 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9699 {
9700 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9701 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9702 switch (fcode)
9703 {
9704 case BUILT_IN_CONSTANT_P:
9705 {
9706 tree val = fold_builtin_constant_p (arg0);
9707
9708 /* Gimplification will pull the CALL_EXPR for the builtin out of
9709 an if condition. When not optimizing, we'll not CSE it back.
9710 To avoid regressions such as link errors, return false now. */
9711 if (!val && !optimize)
9712 val = integer_zero_node;
9713
9714 return val;
9715 }
9716
9717 case BUILT_IN_CLASSIFY_TYPE:
9718 return fold_builtin_classify_type (arg0);
9719
9720 case BUILT_IN_STRLEN:
9721 return fold_builtin_strlen (loc, type, arg0);
9722
9723 CASE_FLT_FN (BUILT_IN_FABS):
9724 case BUILT_IN_FABSD32:
9725 case BUILT_IN_FABSD64:
9726 case BUILT_IN_FABSD128:
9727 return fold_builtin_fabs (loc, arg0, type);
9728
9729 case BUILT_IN_ABS:
9730 case BUILT_IN_LABS:
9731 case BUILT_IN_LLABS:
9732 case BUILT_IN_IMAXABS:
9733 return fold_builtin_abs (loc, arg0, type);
9734
9735 CASE_FLT_FN (BUILT_IN_CONJ):
9736 if (validate_arg (arg0, COMPLEX_TYPE)
9737 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9738 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9739 break;
9740
9741 CASE_FLT_FN (BUILT_IN_CREAL):
9742 if (validate_arg (arg0, COMPLEX_TYPE)
9743 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9744 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9745 break;
9746
9747 CASE_FLT_FN (BUILT_IN_CIMAG):
9748 if (validate_arg (arg0, COMPLEX_TYPE)
9749 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9750 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9751 break;
9752
9753 CASE_FLT_FN (BUILT_IN_CCOS):
9754 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9755
9756 CASE_FLT_FN (BUILT_IN_CCOSH):
9757 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9758
9759 CASE_FLT_FN (BUILT_IN_CPROJ):
9760 return fold_builtin_cproj (loc, arg0, type);
9761
9762 CASE_FLT_FN (BUILT_IN_CSIN):
9763 if (validate_arg (arg0, COMPLEX_TYPE)
9764 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9765 return do_mpc_arg1 (arg0, type, mpc_sin);
9766 break;
9767
9768 CASE_FLT_FN (BUILT_IN_CSINH):
9769 if (validate_arg (arg0, COMPLEX_TYPE)
9770 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9771 return do_mpc_arg1 (arg0, type, mpc_sinh);
9772 break;
9773
9774 CASE_FLT_FN (BUILT_IN_CTAN):
9775 if (validate_arg (arg0, COMPLEX_TYPE)
9776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9777 return do_mpc_arg1 (arg0, type, mpc_tan);
9778 break;
9779
9780 CASE_FLT_FN (BUILT_IN_CTANH):
9781 if (validate_arg (arg0, COMPLEX_TYPE)
9782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9783 return do_mpc_arg1 (arg0, type, mpc_tanh);
9784 break;
9785
9786 CASE_FLT_FN (BUILT_IN_CLOG):
9787 if (validate_arg (arg0, COMPLEX_TYPE)
9788 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9789 return do_mpc_arg1 (arg0, type, mpc_log);
9790 break;
9791
9792 CASE_FLT_FN (BUILT_IN_CSQRT):
9793 if (validate_arg (arg0, COMPLEX_TYPE)
9794 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9795 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9796 break;
9797
9798 CASE_FLT_FN (BUILT_IN_CASIN):
9799 if (validate_arg (arg0, COMPLEX_TYPE)
9800 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9801 return do_mpc_arg1 (arg0, type, mpc_asin);
9802 break;
9803
9804 CASE_FLT_FN (BUILT_IN_CACOS):
9805 if (validate_arg (arg0, COMPLEX_TYPE)
9806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9807 return do_mpc_arg1 (arg0, type, mpc_acos);
9808 break;
9809
9810 CASE_FLT_FN (BUILT_IN_CATAN):
9811 if (validate_arg (arg0, COMPLEX_TYPE)
9812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9813 return do_mpc_arg1 (arg0, type, mpc_atan);
9814 break;
9815
9816 CASE_FLT_FN (BUILT_IN_CASINH):
9817 if (validate_arg (arg0, COMPLEX_TYPE)
9818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9819 return do_mpc_arg1 (arg0, type, mpc_asinh);
9820 break;
9821
9822 CASE_FLT_FN (BUILT_IN_CACOSH):
9823 if (validate_arg (arg0, COMPLEX_TYPE)
9824 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9825 return do_mpc_arg1 (arg0, type, mpc_acosh);
9826 break;
9827
9828 CASE_FLT_FN (BUILT_IN_CATANH):
9829 if (validate_arg (arg0, COMPLEX_TYPE)
9830 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9831 return do_mpc_arg1 (arg0, type, mpc_atanh);
9832 break;
9833
9834 CASE_FLT_FN (BUILT_IN_CABS):
9835 return fold_builtin_cabs (loc, arg0, type, fndecl);
9836
9837 CASE_FLT_FN (BUILT_IN_CARG):
9838 return fold_builtin_carg (loc, arg0, type);
9839
9840 CASE_FLT_FN (BUILT_IN_SQRT):
9841 return fold_builtin_sqrt (loc, arg0, type);
9842
9843 CASE_FLT_FN (BUILT_IN_CBRT):
9844 return fold_builtin_cbrt (loc, arg0, type);
9845
9846 CASE_FLT_FN (BUILT_IN_ASIN):
9847 if (validate_arg (arg0, REAL_TYPE))
9848 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9849 &dconstm1, &dconst1, true);
9850 break;
9851
9852 CASE_FLT_FN (BUILT_IN_ACOS):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9855 &dconstm1, &dconst1, true);
9856 break;
9857
9858 CASE_FLT_FN (BUILT_IN_ATAN):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9861 break;
9862
9863 CASE_FLT_FN (BUILT_IN_ASINH):
9864 if (validate_arg (arg0, REAL_TYPE))
9865 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9866 break;
9867
9868 CASE_FLT_FN (BUILT_IN_ACOSH):
9869 if (validate_arg (arg0, REAL_TYPE))
9870 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9871 &dconst1, NULL, true);
9872 break;
9873
9874 CASE_FLT_FN (BUILT_IN_ATANH):
9875 if (validate_arg (arg0, REAL_TYPE))
9876 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9877 &dconstm1, &dconst1, false);
9878 break;
9879
9880 CASE_FLT_FN (BUILT_IN_SIN):
9881 if (validate_arg (arg0, REAL_TYPE))
9882 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9883 break;
9884
9885 CASE_FLT_FN (BUILT_IN_COS):
9886 return fold_builtin_cos (loc, arg0, type, fndecl);
9887
9888 CASE_FLT_FN (BUILT_IN_TAN):
9889 return fold_builtin_tan (arg0, type);
9890
9891 CASE_FLT_FN (BUILT_IN_CEXP):
9892 return fold_builtin_cexp (loc, arg0, type);
9893
9894 CASE_FLT_FN (BUILT_IN_CEXPI):
9895 if (validate_arg (arg0, REAL_TYPE))
9896 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9897 break;
9898
9899 CASE_FLT_FN (BUILT_IN_SINH):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9902 break;
9903
9904 CASE_FLT_FN (BUILT_IN_COSH):
9905 return fold_builtin_cosh (loc, arg0, type, fndecl);
9906
9907 CASE_FLT_FN (BUILT_IN_TANH):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9910 break;
9911
9912 CASE_FLT_FN (BUILT_IN_ERF):
9913 if (validate_arg (arg0, REAL_TYPE))
9914 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_ERFC):
9918 if (validate_arg (arg0, REAL_TYPE))
9919 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9920 break;
9921
9922 CASE_FLT_FN (BUILT_IN_TGAMMA):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9925 break;
9926
9927 CASE_FLT_FN (BUILT_IN_EXP):
9928 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9929
9930 CASE_FLT_FN (BUILT_IN_EXP2):
9931 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9932
9933 CASE_FLT_FN (BUILT_IN_EXP10):
9934 CASE_FLT_FN (BUILT_IN_POW10):
9935 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9936
9937 CASE_FLT_FN (BUILT_IN_EXPM1):
9938 if (validate_arg (arg0, REAL_TYPE))
9939 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9940 break;
9941
9942 CASE_FLT_FN (BUILT_IN_LOG):
9943 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9944
9945 CASE_FLT_FN (BUILT_IN_LOG2):
9946 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9947
9948 CASE_FLT_FN (BUILT_IN_LOG10):
9949 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9950
9951 CASE_FLT_FN (BUILT_IN_LOG1P):
9952 if (validate_arg (arg0, REAL_TYPE))
9953 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9954 &dconstm1, NULL, false);
9955 break;
9956
9957 CASE_FLT_FN (BUILT_IN_J0):
9958 if (validate_arg (arg0, REAL_TYPE))
9959 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9960 NULL, NULL, 0);
9961 break;
9962
9963 CASE_FLT_FN (BUILT_IN_J1):
9964 if (validate_arg (arg0, REAL_TYPE))
9965 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9966 NULL, NULL, 0);
9967 break;
9968
9969 CASE_FLT_FN (BUILT_IN_Y0):
9970 if (validate_arg (arg0, REAL_TYPE))
9971 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9972 &dconst0, NULL, false);
9973 break;
9974
9975 CASE_FLT_FN (BUILT_IN_Y1):
9976 if (validate_arg (arg0, REAL_TYPE))
9977 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9978 &dconst0, NULL, false);
9979 break;
9980
9981 CASE_FLT_FN (BUILT_IN_NAN):
9982 case BUILT_IN_NAND32:
9983 case BUILT_IN_NAND64:
9984 case BUILT_IN_NAND128:
9985 return fold_builtin_nan (arg0, type, true);
9986
9987 CASE_FLT_FN (BUILT_IN_NANS):
9988 return fold_builtin_nan (arg0, type, false);
9989
9990 CASE_FLT_FN (BUILT_IN_FLOOR):
9991 return fold_builtin_floor (loc, fndecl, arg0);
9992
9993 CASE_FLT_FN (BUILT_IN_CEIL):
9994 return fold_builtin_ceil (loc, fndecl, arg0);
9995
9996 CASE_FLT_FN (BUILT_IN_TRUNC):
9997 return fold_builtin_trunc (loc, fndecl, arg0);
9998
9999 CASE_FLT_FN (BUILT_IN_ROUND):
10000 return fold_builtin_round (loc, fndecl, arg0);
10001
10002 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10003 CASE_FLT_FN (BUILT_IN_RINT):
10004 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10005
10006 CASE_FLT_FN (BUILT_IN_ICEIL):
10007 CASE_FLT_FN (BUILT_IN_LCEIL):
10008 CASE_FLT_FN (BUILT_IN_LLCEIL):
10009 CASE_FLT_FN (BUILT_IN_LFLOOR):
10010 CASE_FLT_FN (BUILT_IN_IFLOOR):
10011 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10012 CASE_FLT_FN (BUILT_IN_IROUND):
10013 CASE_FLT_FN (BUILT_IN_LROUND):
10014 CASE_FLT_FN (BUILT_IN_LLROUND):
10015 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10016
10017 CASE_FLT_FN (BUILT_IN_IRINT):
10018 CASE_FLT_FN (BUILT_IN_LRINT):
10019 CASE_FLT_FN (BUILT_IN_LLRINT):
10020 return fold_fixed_mathfn (loc, fndecl, arg0);
10021
10022 case BUILT_IN_BSWAP16:
10023 case BUILT_IN_BSWAP32:
10024 case BUILT_IN_BSWAP64:
10025 return fold_builtin_bswap (fndecl, arg0);
10026
10027 CASE_INT_FN (BUILT_IN_FFS):
10028 CASE_INT_FN (BUILT_IN_CLZ):
10029 CASE_INT_FN (BUILT_IN_CTZ):
10030 CASE_INT_FN (BUILT_IN_CLRSB):
10031 CASE_INT_FN (BUILT_IN_POPCOUNT):
10032 CASE_INT_FN (BUILT_IN_PARITY):
10033 return fold_builtin_bitop (fndecl, arg0);
10034
10035 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10036 return fold_builtin_signbit (loc, arg0, type);
10037
10038 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10039 return fold_builtin_significand (loc, arg0, type);
10040
10041 CASE_FLT_FN (BUILT_IN_ILOGB):
10042 CASE_FLT_FN (BUILT_IN_LOGB):
10043 return fold_builtin_logb (loc, arg0, type);
10044
10045 case BUILT_IN_ISASCII:
10046 return fold_builtin_isascii (loc, arg0);
10047
10048 case BUILT_IN_TOASCII:
10049 return fold_builtin_toascii (loc, arg0);
10050
10051 case BUILT_IN_ISDIGIT:
10052 return fold_builtin_isdigit (loc, arg0);
10053
10054 CASE_FLT_FN (BUILT_IN_FINITE):
10055 case BUILT_IN_FINITED32:
10056 case BUILT_IN_FINITED64:
10057 case BUILT_IN_FINITED128:
10058 case BUILT_IN_ISFINITE:
10059 {
10060 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10061 if (ret)
10062 return ret;
10063 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10064 }
10065
10066 CASE_FLT_FN (BUILT_IN_ISINF):
10067 case BUILT_IN_ISINFD32:
10068 case BUILT_IN_ISINFD64:
10069 case BUILT_IN_ISINFD128:
10070 {
10071 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10072 if (ret)
10073 return ret;
10074 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10075 }
10076
10077 case BUILT_IN_ISNORMAL:
10078 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10079
10080 case BUILT_IN_ISINF_SIGN:
10081 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10082
10083 CASE_FLT_FN (BUILT_IN_ISNAN):
10084 case BUILT_IN_ISNAND32:
10085 case BUILT_IN_ISNAND64:
10086 case BUILT_IN_ISNAND128:
10087 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10088
10089 case BUILT_IN_PRINTF:
10090 case BUILT_IN_PRINTF_UNLOCKED:
10091 case BUILT_IN_VPRINTF:
10092 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10093
10094 case BUILT_IN_FREE:
10095 if (integer_zerop (arg0))
10096 return build_empty_stmt (loc);
10097 break;
10098
10099 default:
10100 break;
10101 }
10102
10103 return NULL_TREE;
10104
10105 }
10106
10107 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10108 IGNORE is true if the result of the function call is ignored. This
10109 function returns NULL_TREE if no simplification was possible. */
10110
10111 static tree
10112 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10113 {
10114 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10115 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10116
10117 switch (fcode)
10118 {
10119 CASE_FLT_FN (BUILT_IN_JN):
10120 if (validate_arg (arg0, INTEGER_TYPE)
10121 && validate_arg (arg1, REAL_TYPE))
10122 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10123 break;
10124
10125 CASE_FLT_FN (BUILT_IN_YN):
10126 if (validate_arg (arg0, INTEGER_TYPE)
10127 && validate_arg (arg1, REAL_TYPE))
10128 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10129 &dconst0, false);
10130 break;
10131
10132 CASE_FLT_FN (BUILT_IN_DREM):
10133 CASE_FLT_FN (BUILT_IN_REMAINDER):
10134 if (validate_arg (arg0, REAL_TYPE)
10135 && validate_arg (arg1, REAL_TYPE))
10136 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10137 break;
10138
10139 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10140 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10141 if (validate_arg (arg0, REAL_TYPE)
10142 && validate_arg (arg1, POINTER_TYPE))
10143 return do_mpfr_lgamma_r (arg0, arg1, type);
10144 break;
10145
10146 CASE_FLT_FN (BUILT_IN_ATAN2):
10147 if (validate_arg (arg0, REAL_TYPE)
10148 && validate_arg (arg1, REAL_TYPE))
10149 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10150 break;
10151
10152 CASE_FLT_FN (BUILT_IN_FDIM):
10153 if (validate_arg (arg0, REAL_TYPE)
10154 && validate_arg (arg1, REAL_TYPE))
10155 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10156 break;
10157
10158 CASE_FLT_FN (BUILT_IN_HYPOT):
10159 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10160
10161 CASE_FLT_FN (BUILT_IN_CPOW):
10162 if (validate_arg (arg0, COMPLEX_TYPE)
10163 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10164 && validate_arg (arg1, COMPLEX_TYPE)
10165 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10166 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10167 break;
10168
10169 CASE_FLT_FN (BUILT_IN_LDEXP):
10170 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10171 CASE_FLT_FN (BUILT_IN_SCALBN):
10172 CASE_FLT_FN (BUILT_IN_SCALBLN):
10173 return fold_builtin_load_exponent (loc, arg0, arg1,
10174 type, /*ldexp=*/false);
10175
10176 CASE_FLT_FN (BUILT_IN_FREXP):
10177 return fold_builtin_frexp (loc, arg0, arg1, type);
10178
10179 CASE_FLT_FN (BUILT_IN_MODF):
10180 return fold_builtin_modf (loc, arg0, arg1, type);
10181
10182 case BUILT_IN_STRSTR:
10183 return fold_builtin_strstr (loc, arg0, arg1, type);
10184
10185 case BUILT_IN_STRSPN:
10186 return fold_builtin_strspn (loc, arg0, arg1);
10187
10188 case BUILT_IN_STRCSPN:
10189 return fold_builtin_strcspn (loc, arg0, arg1);
10190
10191 case BUILT_IN_STRCHR:
10192 case BUILT_IN_INDEX:
10193 return fold_builtin_strchr (loc, arg0, arg1, type);
10194
10195 case BUILT_IN_STRRCHR:
10196 case BUILT_IN_RINDEX:
10197 return fold_builtin_strrchr (loc, arg0, arg1, type);
10198
10199 case BUILT_IN_STPCPY:
10200 if (ignore)
10201 {
10202 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10203 if (!fn)
10204 break;
10205
10206 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10207 }
10208 else
10209 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10210 break;
10211
10212 case BUILT_IN_STRCMP:
10213 return fold_builtin_strcmp (loc, arg0, arg1);
10214
10215 case BUILT_IN_STRPBRK:
10216 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10217
10218 case BUILT_IN_EXPECT:
10219 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10220
10221 CASE_FLT_FN (BUILT_IN_POW):
10222 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10223
10224 CASE_FLT_FN (BUILT_IN_POWI):
10225 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10226
10227 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10228 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10229
10230 CASE_FLT_FN (BUILT_IN_FMIN):
10231 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10232
10233 CASE_FLT_FN (BUILT_IN_FMAX):
10234 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10235
10236 case BUILT_IN_ISGREATER:
10237 return fold_builtin_unordered_cmp (loc, fndecl,
10238 arg0, arg1, UNLE_EXPR, LE_EXPR);
10239 case BUILT_IN_ISGREATEREQUAL:
10240 return fold_builtin_unordered_cmp (loc, fndecl,
10241 arg0, arg1, UNLT_EXPR, LT_EXPR);
10242 case BUILT_IN_ISLESS:
10243 return fold_builtin_unordered_cmp (loc, fndecl,
10244 arg0, arg1, UNGE_EXPR, GE_EXPR);
10245 case BUILT_IN_ISLESSEQUAL:
10246 return fold_builtin_unordered_cmp (loc, fndecl,
10247 arg0, arg1, UNGT_EXPR, GT_EXPR);
10248 case BUILT_IN_ISLESSGREATER:
10249 return fold_builtin_unordered_cmp (loc, fndecl,
10250 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10251 case BUILT_IN_ISUNORDERED:
10252 return fold_builtin_unordered_cmp (loc, fndecl,
10253 arg0, arg1, UNORDERED_EXPR,
10254 NOP_EXPR);
10255
10256 /* We do the folding for va_start in the expander. */
10257 case BUILT_IN_VA_START:
10258 break;
10259
10260 case BUILT_IN_OBJECT_SIZE:
10261 return fold_builtin_object_size (arg0, arg1);
10262
10263 case BUILT_IN_PRINTF:
10264 case BUILT_IN_PRINTF_UNLOCKED:
10265 case BUILT_IN_VPRINTF:
10266 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10267
10268 case BUILT_IN_PRINTF_CHK:
10269 case BUILT_IN_VPRINTF_CHK:
10270 if (!validate_arg (arg0, INTEGER_TYPE)
10271 || TREE_SIDE_EFFECTS (arg0))
10272 return NULL_TREE;
10273 else
10274 return fold_builtin_printf (loc, fndecl,
10275 arg1, NULL_TREE, ignore, fcode);
10276 break;
10277
10278 case BUILT_IN_FPRINTF:
10279 case BUILT_IN_FPRINTF_UNLOCKED:
10280 case BUILT_IN_VFPRINTF:
10281 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10282 ignore, fcode);
10283
10284 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10285 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10286
10287 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10288 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10289
10290 default:
10291 break;
10292 }
10293 return NULL_TREE;
10294 }
10295
10296 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10297 and ARG2. IGNORE is true if the result of the function call is ignored.
10298 This function returns NULL_TREE if no simplification was possible. */
10299
10300 static tree
10301 fold_builtin_3 (location_t loc, tree fndecl,
10302 tree arg0, tree arg1, tree arg2, bool ignore)
10303 {
10304 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10305 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10306 switch (fcode)
10307 {
10308
10309 CASE_FLT_FN (BUILT_IN_SINCOS):
10310 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10311
10312 CASE_FLT_FN (BUILT_IN_FMA):
10313 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10315
10316 CASE_FLT_FN (BUILT_IN_REMQUO):
10317 if (validate_arg (arg0, REAL_TYPE)
10318 && validate_arg (arg1, REAL_TYPE)
10319 && validate_arg (arg2, POINTER_TYPE))
10320 return do_mpfr_remquo (arg0, arg1, arg2);
10321 break;
10322
10323 case BUILT_IN_STRNCAT:
10324 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10325
10326 case BUILT_IN_STRNCMP:
10327 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10328
10329 case BUILT_IN_MEMCHR:
10330 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10331
10332 case BUILT_IN_BCMP:
10333 case BUILT_IN_MEMCMP:
10334 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10335
10336 case BUILT_IN_PRINTF_CHK:
10337 case BUILT_IN_VPRINTF_CHK:
10338 if (!validate_arg (arg0, INTEGER_TYPE)
10339 || TREE_SIDE_EFFECTS (arg0))
10340 return NULL_TREE;
10341 else
10342 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10343 break;
10344
10345 case BUILT_IN_FPRINTF:
10346 case BUILT_IN_FPRINTF_UNLOCKED:
10347 case BUILT_IN_VFPRINTF:
10348 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10349 ignore, fcode);
10350
10351 case BUILT_IN_FPRINTF_CHK:
10352 case BUILT_IN_VFPRINTF_CHK:
10353 if (!validate_arg (arg1, INTEGER_TYPE)
10354 || TREE_SIDE_EFFECTS (arg1))
10355 return NULL_TREE;
10356 else
10357 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10358 ignore, fcode);
10359
10360 case BUILT_IN_EXPECT:
10361 return fold_builtin_expect (loc, arg0, arg1, arg2);
10362
10363 default:
10364 break;
10365 }
10366 return NULL_TREE;
10367 }
10368
10369 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10370 ARG2, and ARG3. IGNORE is true if the result of the function call is
10371 ignored. This function returns NULL_TREE if no simplification was
10372 possible. */
10373
10374 static tree
10375 fold_builtin_4 (location_t loc, tree fndecl,
10376 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10377 {
10378 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10379
10380 switch (fcode)
10381 {
10382 case BUILT_IN_FPRINTF_CHK:
10383 case BUILT_IN_VFPRINTF_CHK:
10384 if (!validate_arg (arg1, INTEGER_TYPE)
10385 || TREE_SIDE_EFFECTS (arg1))
10386 return NULL_TREE;
10387 else
10388 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10389 ignore, fcode);
10390 break;
10391
10392 default:
10393 break;
10394 }
10395 return NULL_TREE;
10396 }
10397
10398 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10399 arguments, where NARGS <= 4. IGNORE is true if the result of the
10400 function call is ignored. This function returns NULL_TREE if no
10401 simplification was possible. Note that this only folds builtins with
10402 fixed argument patterns. Foldings that do varargs-to-varargs
10403 transformations, or that match calls with more than 4 arguments,
10404 need to be handled with fold_builtin_varargs instead. */
10405
10406 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10407
10408 tree
10409 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10410 {
10411 tree ret = NULL_TREE;
10412
10413 switch (nargs)
10414 {
10415 case 0:
10416 ret = fold_builtin_0 (loc, fndecl, ignore);
10417 break;
10418 case 1:
10419 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10420 break;
10421 case 2:
10422 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10423 break;
10424 case 3:
10425 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10426 break;
10427 case 4:
10428 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10429 ignore);
10430 break;
10431 default:
10432 break;
10433 }
10434 if (ret)
10435 {
10436 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10437 SET_EXPR_LOCATION (ret, loc);
10438 TREE_NO_WARNING (ret) = 1;
10439 return ret;
10440 }
10441 return NULL_TREE;
10442 }
10443
10444 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10445 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10446 of arguments in ARGS to be omitted. OLDNARGS is the number of
10447 elements in ARGS. */
10448
10449 static tree
10450 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10451 int skip, tree fndecl, int n, va_list newargs)
10452 {
10453 int nargs = oldnargs - skip + n;
10454 tree *buffer;
10455
10456 if (n > 0)
10457 {
10458 int i, j;
10459
10460 buffer = XALLOCAVEC (tree, nargs);
10461 for (i = 0; i < n; i++)
10462 buffer[i] = va_arg (newargs, tree);
10463 for (j = skip; j < oldnargs; j++, i++)
10464 buffer[i] = args[j];
10465 }
10466 else
10467 buffer = args + skip;
10468
10469 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10470 }
10471
10472 /* Return true if FNDECL shouldn't be folded right now.
10473 If a built-in function has an inline attribute always_inline
10474 wrapper, defer folding it after always_inline functions have
10475 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10476 might not be performed. */
10477
10478 bool
10479 avoid_folding_inline_builtin (tree fndecl)
10480 {
10481 return (DECL_DECLARED_INLINE_P (fndecl)
10482 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10483 && cfun
10484 && !cfun->always_inline_functions_inlined
10485 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10486 }
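
/* For illustration, a hypothetical glibc-style wrapper that this
   predicate protects (not from the GCC sources):

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     char *
     strcpy (char *__d, const char *__s)
     {
       return __builtin___strcpy_chk (__d, __s,
				      __builtin_object_size (__d, 1));
     }

   Folding the strcpy call before the wrapper body is inlined would
   bypass the __strcpy_chk object-size check.  */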
10487
10488 /* A wrapper function for builtin folding that prevents warnings for
10489 "statement without effect" and the like, caused by removing the
10490    call node before the warning is generated.  */
10491
10492 tree
10493 fold_call_expr (location_t loc, tree exp, bool ignore)
10494 {
10495 tree ret = NULL_TREE;
10496 tree fndecl = get_callee_fndecl (exp);
10497 if (fndecl
10498 && TREE_CODE (fndecl) == FUNCTION_DECL
10499 && DECL_BUILT_IN (fndecl)
10500 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10501 yet. Defer folding until we see all the arguments
10502 (after inlining). */
10503 && !CALL_EXPR_VA_ARG_PACK (exp))
10504 {
10505 int nargs = call_expr_nargs (exp);
10506
10507 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10508 	 instead the last argument is __builtin_va_arg_pack ().  Defer folding
10509 even in that case, until arguments are finalized. */
10510 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10511 {
10512 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10513 if (fndecl2
10514 && TREE_CODE (fndecl2) == FUNCTION_DECL
10515 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10516 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10517 return NULL_TREE;
10518 }
10519
10520 if (avoid_folding_inline_builtin (fndecl))
10521 return NULL_TREE;
10522
10523 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10524 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10525 CALL_EXPR_ARGP (exp), ignore);
10526 else
10527 {
10528 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10529 {
10530 tree *args = CALL_EXPR_ARGP (exp);
10531 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10532 }
10533 if (!ret)
10534 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10535 if (ret)
10536 return ret;
10537 }
10538 }
10539 return NULL_TREE;
10540 }
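
/* For illustration, a hypothetical caller whose folding is deferred by
   the __builtin_va_arg_pack check above:

     extern inline __attribute__ ((always_inline)) int
     log_it (const char *fmt, ...)
     {
       return printf (fmt, __builtin_va_arg_pack ());
     }

   Until log_it is inlined, __builtin_va_arg_pack () stands in for the
   caller's variadic arguments, so any printf fold here would be
   premature.  */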
10541
10542 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10543 N arguments are passed in the array ARGARRAY. */
10544
10545 tree
10546 fold_builtin_call_array (location_t loc, tree type,
10547 tree fn,
10548 int n,
10549 tree *argarray)
10550 {
10551 tree ret = NULL_TREE;
10552 tree exp;
10553
10554 if (TREE_CODE (fn) == ADDR_EXPR)
10555 {
10556 tree fndecl = TREE_OPERAND (fn, 0);
10557 if (TREE_CODE (fndecl) == FUNCTION_DECL
10558 && DECL_BUILT_IN (fndecl))
10559 {
10560 	  /* If the last argument is __builtin_va_arg_pack (), arguments to this
10561 function are not finalized yet. Defer folding until they are. */
10562 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10563 {
10564 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10565 if (fndecl2
10566 && TREE_CODE (fndecl2) == FUNCTION_DECL
10567 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10568 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10569 return build_call_array_loc (loc, type, fn, n, argarray);
10570 }
10571 if (avoid_folding_inline_builtin (fndecl))
10572 return build_call_array_loc (loc, type, fn, n, argarray);
10573 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10574 {
10575 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10576 if (ret)
10577 return ret;
10578
10579 return build_call_array_loc (loc, type, fn, n, argarray);
10580 }
10581 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10582 {
10583 /* First try the transformations that don't require consing up
10584 an exp. */
10585 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10586 if (ret)
10587 return ret;
10588 }
10589
10590 /* If we got this far, we need to build an exp. */
10591 exp = build_call_array_loc (loc, type, fn, n, argarray);
10592 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10593 return ret ? ret : exp;
10594 }
10595 }
10596
10597 return build_call_array_loc (loc, type, fn, n, argarray);
10598 }
10599
10600 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10601 along with N new arguments specified as the "..." parameters. SKIP
10602 is the number of arguments in EXP to be omitted. This function is used
10603 to do varargs-to-varargs transformations. */
10604
10605 static tree
10606 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10607 {
10608 va_list ap;
10609 tree t;
10610
10611 va_start (ap, n);
10612 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10613 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10614 va_end (ap);
10615
10616 return t;
10617 }
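
/* For illustration, a hypothetical use of rewrite_call_expr (argument
   positions invented): rewriting foo_chk (dest, flag, size, str) into
   foo (dest, str) could be written

     rewrite_call_expr (loc, exp, 4, foo_decl, 2, dest, str);

   SKIP = 4 drops all four original arguments and N = 2 supplies the
   two new ones.  */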
10618
10619 /* Validate a single argument ARG against a tree code CODE representing
10620 a type. */
10621
10622 static bool
10623 validate_arg (const_tree arg, enum tree_code code)
10624 {
10625 if (!arg)
10626 return false;
10627 else if (code == POINTER_TYPE)
10628 return POINTER_TYPE_P (TREE_TYPE (arg));
10629 else if (code == INTEGER_TYPE)
10630 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10631 return code == TREE_CODE (TREE_TYPE (arg));
10632 }
10633
10634 /* This function validates the types of a function call argument list
10635 against a specified list of tree_codes. If the last specifier is a 0,
10636    that represents an ellipsis; otherwise the last specifier must be a
10637 VOID_TYPE.
10638
10639 This is the GIMPLE version of validate_arglist. Eventually we want to
10640 completely convert builtins.c to work from GIMPLEs and the tree based
10641 validate_arglist will then be removed. */
10642
10643 bool
10644 validate_gimple_arglist (const_gimple call, ...)
10645 {
10646 enum tree_code code;
10647   bool res = false;
10648 va_list ap;
10649 const_tree arg;
10650 size_t i;
10651
10652 va_start (ap, call);
10653 i = 0;
10654
10655 do
10656 {
10657 code = (enum tree_code) va_arg (ap, int);
10658 switch (code)
10659 {
10660 case 0:
10661 	  /* This signifies an ellipsis; any further arguments are OK.  */
10662 res = true;
10663 goto end;
10664 case VOID_TYPE:
10665 /* This signifies an endlink, if no arguments remain, return
10666 true, otherwise return false. */
10667 res = (i == gimple_call_num_args (call));
10668 goto end;
10669 default:
10670 /* If no parameters remain or the parameter's code does not
10671 match the specified code, return false. Otherwise continue
10672 checking any remaining arguments. */
10673 arg = gimple_call_arg (call, i++);
10674 if (!validate_arg (arg, code))
10675 goto end;
10676 break;
10677 }
10678 }
10679 while (1);
10680
10681 /* We need gotos here since we can only have one VA_CLOSE in a
10682 function. */
10683 end: ;
10684 va_end (ap);
10685
10686 return res;
10687 }
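
/* For illustration, a typical call (hypothetical context):

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   VOID_TYPE))
       return false;

   accepts exactly (pointer, integer)-shaped calls; ending the list
   with 0 instead of VOID_TYPE would allow any further arguments.  */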
10688
10689 /* Default target-specific builtin expander that does nothing. */
10690
10691 rtx
10692 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10693 rtx target ATTRIBUTE_UNUSED,
10694 rtx subtarget ATTRIBUTE_UNUSED,
10695 machine_mode mode ATTRIBUTE_UNUSED,
10696 int ignore ATTRIBUTE_UNUSED)
10697 {
10698 return NULL_RTX;
10699 }
10700
10701 /* Returns true if EXP represents data that would potentially reside
10702 in a readonly section. */
10703
10704 bool
10705 readonly_data_expr (tree exp)
10706 {
10707 STRIP_NOPS (exp);
10708
10709 if (TREE_CODE (exp) != ADDR_EXPR)
10710 return false;
10711
10712 exp = get_base_address (TREE_OPERAND (exp, 0));
10713 if (!exp)
10714 return false;
10715
10716 /* Make sure we call decl_readonly_section only for trees it
10717 can handle (since it returns true for everything it doesn't
10718 understand). */
10719 if (TREE_CODE (exp) == STRING_CST
10720 || TREE_CODE (exp) == CONSTRUCTOR
10721 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10722 return decl_readonly_section (exp, 0);
10723 else
10724 return false;
10725 }
10726
10727 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10728 to the call, and TYPE is its return type.
10729
10730 Return NULL_TREE if no simplification was possible, otherwise return the
10731 simplified form of the call as a tree.
10732
10733 The simplified form may be a constant or other expression which
10734 computes the same value, but in a more efficient manner (including
10735 calls to other builtin functions).
10736
10737 The call may contain arguments which need to be evaluated, but
10738 which are not useful to determine the result of the call. In
10739 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10740 COMPOUND_EXPR will be an argument which must be evaluated.
10741 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10742 COMPOUND_EXPR in the chain will contain the tree for the simplified
10743 form of the builtin function call. */
10744
10745 static tree
10746 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10747 {
10748 if (!validate_arg (s1, POINTER_TYPE)
10749 || !validate_arg (s2, POINTER_TYPE))
10750 return NULL_TREE;
10751 else
10752 {
10753 tree fn;
10754 const char *p1, *p2;
10755
10756 p2 = c_getstr (s2);
10757 if (p2 == NULL)
10758 return NULL_TREE;
10759
10760 p1 = c_getstr (s1);
10761 if (p1 != NULL)
10762 {
10763 const char *r = strstr (p1, p2);
10764 tree tem;
10765
10766 if (r == NULL)
10767 return build_int_cst (TREE_TYPE (s1), 0);
10768
10769 /* Return an offset into the constant string argument. */
10770 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10771 return fold_convert_loc (loc, type, tem);
10772 }
10773
10774 /* The argument is const char *, and the result is char *, so we need
10775 a type conversion here to avoid a warning. */
10776 if (p2[0] == '\0')
10777 return fold_convert_loc (loc, type, s1);
10778
10779 if (p2[1] != '\0')
10780 return NULL_TREE;
10781
10782 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10783 if (!fn)
10784 return NULL_TREE;
10785
10786 /* New argument list transforming strstr(s1, s2) to
10787 strchr(s1, s2[0]). */
10788 return build_call_expr_loc (loc, fn, 2, s1,
10789 build_int_cst (integer_type_node, p2[0]));
10790 }
10791 }
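
/* For illustration, the folds above in effect (inputs invented):

     strstr (s, "")          ->  (char *) s
     strstr (s, "q")         ->  strchr (s, 'q')
     strstr ("abcde", "cd")  ->  "abcde" + 2
     strstr ("abc", "zz")    ->  (char *) 0

   The strchr form is emitted only when builtin_decl_implicit provides
   a BUILT_IN_STRCHR decl.  */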
10792
10793 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10794 the call, and TYPE is its return type.
10795
10796 Return NULL_TREE if no simplification was possible, otherwise return the
10797 simplified form of the call as a tree.
10798
10799 The simplified form may be a constant or other expression which
10800 computes the same value, but in a more efficient manner (including
10801 calls to other builtin functions).
10802
10803 The call may contain arguments which need to be evaluated, but
10804 which are not useful to determine the result of the call. In
10805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10806 COMPOUND_EXPR will be an argument which must be evaluated.
10807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10808 COMPOUND_EXPR in the chain will contain the tree for the simplified
10809 form of the builtin function call. */
10810
10811 static tree
10812 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10813 {
10814 if (!validate_arg (s1, POINTER_TYPE)
10815 || !validate_arg (s2, INTEGER_TYPE))
10816 return NULL_TREE;
10817 else
10818 {
10819 const char *p1;
10820
10821 if (TREE_CODE (s2) != INTEGER_CST)
10822 return NULL_TREE;
10823
10824 p1 = c_getstr (s1);
10825 if (p1 != NULL)
10826 {
10827 char c;
10828 const char *r;
10829 tree tem;
10830
10831 if (target_char_cast (s2, &c))
10832 return NULL_TREE;
10833
10834 r = strchr (p1, c);
10835
10836 if (r == NULL)
10837 return build_int_cst (TREE_TYPE (s1), 0);
10838
10839 /* Return an offset into the constant string argument. */
10840 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10841 return fold_convert_loc (loc, type, tem);
10842 }
10843 return NULL_TREE;
10844 }
10845 }
10846
10847 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10848 the call, and TYPE is its return type.
10849
10850 Return NULL_TREE if no simplification was possible, otherwise return the
10851 simplified form of the call as a tree.
10852
10853 The simplified form may be a constant or other expression which
10854 computes the same value, but in a more efficient manner (including
10855 calls to other builtin functions).
10856
10857 The call may contain arguments which need to be evaluated, but
10858 which are not useful to determine the result of the call. In
10859 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10860 COMPOUND_EXPR will be an argument which must be evaluated.
10861 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10862 COMPOUND_EXPR in the chain will contain the tree for the simplified
10863 form of the builtin function call. */
10864
10865 static tree
10866 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10867 {
10868 if (!validate_arg (s1, POINTER_TYPE)
10869 || !validate_arg (s2, INTEGER_TYPE))
10870 return NULL_TREE;
10871 else
10872 {
10873 tree fn;
10874 const char *p1;
10875
10876 if (TREE_CODE (s2) != INTEGER_CST)
10877 return NULL_TREE;
10878
10879 p1 = c_getstr (s1);
10880 if (p1 != NULL)
10881 {
10882 char c;
10883 const char *r;
10884 tree tem;
10885
10886 if (target_char_cast (s2, &c))
10887 return NULL_TREE;
10888
10889 r = strrchr (p1, c);
10890
10891 if (r == NULL)
10892 return build_int_cst (TREE_TYPE (s1), 0);
10893
10894 /* Return an offset into the constant string argument. */
10895 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10896 return fold_convert_loc (loc, type, tem);
10897 }
10898
10899 if (! integer_zerop (s2))
10900 return NULL_TREE;
10901
10902 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10903 if (!fn)
10904 return NULL_TREE;
10905
10906 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10907 return build_call_expr_loc (loc, fn, 2, s1, s2);
10908 }
10909 }
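
/* For illustration (inputs invented):

     strrchr ("abcabc", 'b')  ->  "abcabc" + 4
     strrchr (s, '\0')        ->  strchr (s, '\0')

   The second fold is valid because searching for the terminating NUL
   finds the same single position from either direction.  */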
10910
10911 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10912 to the call, and TYPE is its return type.
10913
10914 Return NULL_TREE if no simplification was possible, otherwise return the
10915 simplified form of the call as a tree.
10916
10917 The simplified form may be a constant or other expression which
10918 computes the same value, but in a more efficient manner (including
10919 calls to other builtin functions).
10920
10921 The call may contain arguments which need to be evaluated, but
10922 which are not useful to determine the result of the call. In
10923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10924 COMPOUND_EXPR will be an argument which must be evaluated.
10925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10926 COMPOUND_EXPR in the chain will contain the tree for the simplified
10927 form of the builtin function call. */
10928
10929 static tree
10930 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10931 {
10932 if (!validate_arg (s1, POINTER_TYPE)
10933 || !validate_arg (s2, POINTER_TYPE))
10934 return NULL_TREE;
10935 else
10936 {
10937 tree fn;
10938 const char *p1, *p2;
10939
10940 p2 = c_getstr (s2);
10941 if (p2 == NULL)
10942 return NULL_TREE;
10943
10944 p1 = c_getstr (s1);
10945 if (p1 != NULL)
10946 {
10947 const char *r = strpbrk (p1, p2);
10948 tree tem;
10949
10950 if (r == NULL)
10951 return build_int_cst (TREE_TYPE (s1), 0);
10952
10953 /* Return an offset into the constant string argument. */
10954 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10955 return fold_convert_loc (loc, type, tem);
10956 }
10957
10958 if (p2[0] == '\0')
10959 /* strpbrk(x, "") == NULL.
10960 Evaluate and ignore s1 in case it had side-effects. */
10961 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10962
10963 if (p2[1] != '\0')
10964 return NULL_TREE; /* Really call strpbrk. */
10965
10966 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10967 if (!fn)
10968 return NULL_TREE;
10969
10970 /* New argument list transforming strpbrk(s1, s2) to
10971 strchr(s1, s2[0]). */
10972 return build_call_expr_loc (loc, fn, 2, s1,
10973 build_int_cst (integer_type_node, p2[0]));
10974 }
10975 }
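
/* For illustration (inputs invented):

     strpbrk ("hello", "lo")  ->  "hello" + 2
     strpbrk (s, "")          ->  (char *) 0, still evaluating s
     strpbrk (s, "x")         ->  strchr (s, 'x')
*/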
10976
10977 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10978 arguments to the call.
10979
10980 Return NULL_TREE if no simplification was possible, otherwise return the
10981 simplified form of the call as a tree.
10982
10983 The simplified form may be a constant or other expression which
10984 computes the same value, but in a more efficient manner (including
10985 calls to other builtin functions).
10986
10987 The call may contain arguments which need to be evaluated, but
10988 which are not useful to determine the result of the call. In
10989 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10990 COMPOUND_EXPR will be an argument which must be evaluated.
10991 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10992 COMPOUND_EXPR in the chain will contain the tree for the simplified
10993 form of the builtin function call. */
10994
10995 static tree
10996 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
10997 {
10998 if (!validate_arg (dst, POINTER_TYPE)
10999 || !validate_arg (src, POINTER_TYPE)
11000 || !validate_arg (len, INTEGER_TYPE))
11001 return NULL_TREE;
11002 else
11003 {
11004 const char *p = c_getstr (src);
11005
11006 /* If the requested length is zero, or the src parameter string
11007 length is zero, return the dst parameter. */
11008 if (integer_zerop (len) || (p && *p == '\0'))
11009 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11010
11011 /* If the requested len is greater than or equal to the string
11012 length, call strcat. */
11013 if (TREE_CODE (len) == INTEGER_CST && p
11014 && compare_tree_int (len, strlen (p)) >= 0)
11015 {
11016 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11017
11018 /* If the replacement _DECL isn't initialized, don't do the
11019 transformation. */
11020 if (!fn)
11021 return NULL_TREE;
11022
11023 return build_call_expr_loc (loc, fn, 2, dst, src);
11024 }
11025 return NULL_TREE;
11026 }
11027 }
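
/* For illustration (inputs invented):

     strncat (d, s, 0)     ->  d		(s and 0 still evaluated)
     strncat (d, "", n)    ->  d
     strncat (d, "ab", 5)  ->  strcat (d, "ab")	since 5 >= strlen ("ab")
*/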
11028
11029 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11030 to the call.
11031
11032 Return NULL_TREE if no simplification was possible, otherwise return the
11033 simplified form of the call as a tree.
11034
11035 The simplified form may be a constant or other expression which
11036 computes the same value, but in a more efficient manner (including
11037 calls to other builtin functions).
11038
11039 The call may contain arguments which need to be evaluated, but
11040 which are not useful to determine the result of the call. In
11041 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11042 COMPOUND_EXPR will be an argument which must be evaluated.
11043 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11044 COMPOUND_EXPR in the chain will contain the tree for the simplified
11045 form of the builtin function call. */
11046
11047 static tree
11048 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11049 {
11050 if (!validate_arg (s1, POINTER_TYPE)
11051 || !validate_arg (s2, POINTER_TYPE))
11052 return NULL_TREE;
11053 else
11054 {
11055 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11056
11057 /* If both arguments are constants, evaluate at compile-time. */
11058 if (p1 && p2)
11059 {
11060 const size_t r = strspn (p1, p2);
11061 return build_int_cst (size_type_node, r);
11062 }
11063
11064       /* If either argument is "", the result is 0.  */
11065 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11066 /* Evaluate and ignore both arguments in case either one has
11067 side-effects. */
11068 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11069 s1, s2);
11070 return NULL_TREE;
11071 }
11072 }
11073
11074 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11075 to the call.
11076
11077 Return NULL_TREE if no simplification was possible, otherwise return the
11078 simplified form of the call as a tree.
11079
11080 The simplified form may be a constant or other expression which
11081 computes the same value, but in a more efficient manner (including
11082 calls to other builtin functions).
11083
11084 The call may contain arguments which need to be evaluated, but
11085 which are not useful to determine the result of the call. In
11086 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11087 COMPOUND_EXPR will be an argument which must be evaluated.
11088 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11089 COMPOUND_EXPR in the chain will contain the tree for the simplified
11090 form of the builtin function call. */
11091
11092 static tree
11093 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11094 {
11095 if (!validate_arg (s1, POINTER_TYPE)
11096 || !validate_arg (s2, POINTER_TYPE))
11097 return NULL_TREE;
11098 else
11099 {
11100 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11101
11102 /* If both arguments are constants, evaluate at compile-time. */
11103 if (p1 && p2)
11104 {
11105 const size_t r = strcspn (p1, p2);
11106 return build_int_cst (size_type_node, r);
11107 }
11108
11109       /* If the first argument is "", the result is 0.  */
11110 if (p1 && *p1 == '\0')
11111 {
11112 /* Evaluate and ignore argument s2 in case it has
11113 side-effects. */
11114 return omit_one_operand_loc (loc, size_type_node,
11115 size_zero_node, s2);
11116 }
11117
11118 /* If the second argument is "", return __builtin_strlen(s1). */
11119 if (p2 && *p2 == '\0')
11120 {
11121 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11122
11123 /* If the replacement _DECL isn't initialized, don't do the
11124 transformation. */
11125 if (!fn)
11126 return NULL_TREE;
11127
11128 return build_call_expr_loc (loc, fn, 1, s1);
11129 }
11130 return NULL_TREE;
11131 }
11132 }
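
/* For illustration of the two folds above (inputs invented):

     strspn ("aab", "a")  ->  2		(both arguments constant)
     strspn (s, "")       ->  0, still evaluating both arguments
     strcspn ("", s)      ->  0, still evaluating s
     strcspn (s, "")      ->  __builtin_strlen (s)
*/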
11133
11134 /* Fold the next_arg or va_start call EXP.  Returns true if an error
11135    was produced, false otherwise.  This is done so that we don't output
11136    the error or warning more than once.  */
11137
11138 bool
11139 fold_builtin_next_arg (tree exp, bool va_start_p)
11140 {
11141 tree fntype = TREE_TYPE (current_function_decl);
11142 int nargs = call_expr_nargs (exp);
11143 tree arg;
11144   /* There is a good chance the current input_location points inside the
11145      definition of the va_start macro (perhaps on the token for the
11146      builtin) in a system header, so warnings will not be emitted.
11147 Use the location in real source code. */
11148 source_location current_location =
11149 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11150 NULL);
11151
11152 if (!stdarg_p (fntype))
11153 {
11154 error ("%<va_start%> used in function with fixed args");
11155 return true;
11156 }
11157
11158 if (va_start_p)
11159 {
11160 if (va_start_p && (nargs != 2))
11161 {
11162 error ("wrong number of arguments to function %<va_start%>");
11163 return true;
11164 }
11165 arg = CALL_EXPR_ARG (exp, 1);
11166 }
11167   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11168      once we have checked the arguments and, if needed, issued a warning.  */
11169 else
11170 {
11171 if (nargs == 0)
11172 {
11173 /* Evidently an out of date version of <stdarg.h>; can't validate
11174 va_start's second argument, but can still work as intended. */
11175 warning_at (current_location,
11176 OPT_Wvarargs,
11177 "%<__builtin_next_arg%> called without an argument");
11178 return true;
11179 }
11180 else if (nargs > 1)
11181 {
11182 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11183 return true;
11184 }
11185 arg = CALL_EXPR_ARG (exp, 0);
11186 }
11187
11188 if (TREE_CODE (arg) == SSA_NAME)
11189 arg = SSA_NAME_VAR (arg);
11190
11191 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11192 or __builtin_next_arg (0) the first time we see it, after checking
11193 the arguments and if needed issuing a warning. */
11194 if (!integer_zerop (arg))
11195 {
11196 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11197
11198 /* Strip off all nops for the sake of the comparison. This
11199 is not quite the same as STRIP_NOPS. It does more.
11200 We must also strip off INDIRECT_EXPR for C++ reference
11201 parameters. */
11202 while (CONVERT_EXPR_P (arg)
11203 || TREE_CODE (arg) == INDIRECT_REF)
11204 arg = TREE_OPERAND (arg, 0);
11205 if (arg != last_parm)
11206 {
11207 	  /* FIXME: Sometimes with the tree optimizers we can end up with
11208 	     an argument that is not the last one even though the user
11209 	     used the last argument.  We just warn and set the arg to be
11210 	     the last argument so that we will not get wrong code because
11211 	     of it.  */
11212 warning_at (current_location,
11213 OPT_Wvarargs,
11214 "second parameter of %<va_start%> not last named argument");
11215 }
11216
11217 /* Undefined by C99 7.15.1.4p4 (va_start):
11218 "If the parameter parmN is declared with the register storage
11219 class, with a function or array type, or with a type that is
11220 not compatible with the type that results after application of
11221 the default argument promotions, the behavior is undefined."
11222 */
11223 else if (DECL_REGISTER (arg))
11224 {
11225 warning_at (current_location,
11226 OPT_Wvarargs,
11227 "undefined behaviour when second parameter of "
11228 "%<va_start%> is declared with %<register%> storage");
11229 }
11230
11231 /* We want to verify the second parameter just once before the tree
11232 optimizers are run and then avoid keeping it in the tree,
11233 as otherwise we could warn even for correct code like:
11234 void foo (int i, ...)
11235 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11236 if (va_start_p)
11237 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11238 else
11239 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11240 }
11241 return false;
11242 }
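
/* For illustration, hypothetical code this routine diagnoses:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);	// warning: second parameter of va_start
				// not last named argument
       va_end (ap);
     }
*/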
11243
11244
11245 /* Expand a call EXP to __builtin_object_size. */
11246
11247 static rtx
11248 expand_builtin_object_size (tree exp)
11249 {
11250 tree ost;
11251 int object_size_type;
11252 tree fndecl = get_callee_fndecl (exp);
11253
11254 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11255 {
11256 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11257 exp, fndecl);
11258 expand_builtin_trap ();
11259 return const0_rtx;
11260 }
11261
11262 ost = CALL_EXPR_ARG (exp, 1);
11263 STRIP_NOPS (ost);
11264
11265 if (TREE_CODE (ost) != INTEGER_CST
11266 || tree_int_cst_sgn (ost) < 0
11267 || compare_tree_int (ost, 3) > 0)
11268 {
11269 error ("%Klast argument of %D is not integer constant between 0 and 3",
11270 exp, fndecl);
11271 expand_builtin_trap ();
11272 return const0_rtx;
11273 }
11274
11275 object_size_type = tree_to_shwi (ost);
11276
11277 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11278 }
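
/* For illustration (hypothetical call): if earlier passes could not
   fold __builtin_object_size (p, N) to a constant, the expansion here
   falls back to the "unknown" answers:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0
*/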
11279
11280 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11281 FCODE is the BUILT_IN_* to use.
11282 Return NULL_RTX if we failed; the caller should emit a normal call,
11283 otherwise try to get the result in TARGET, if convenient (and in
11284 mode MODE if that's convenient). */
11285
11286 static rtx
11287 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11288 enum built_in_function fcode)
11289 {
11290 tree dest, src, len, size;
11291
11292 if (!validate_arglist (exp,
11293 POINTER_TYPE,
11294 fcode == BUILT_IN_MEMSET_CHK
11295 ? INTEGER_TYPE : POINTER_TYPE,
11296 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11297 return NULL_RTX;
11298
11299 dest = CALL_EXPR_ARG (exp, 0);
11300 src = CALL_EXPR_ARG (exp, 1);
11301 len = CALL_EXPR_ARG (exp, 2);
11302 size = CALL_EXPR_ARG (exp, 3);
11303
11304 if (! tree_fits_uhwi_p (size))
11305 return NULL_RTX;
11306
11307 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11308 {
11309 tree fn;
11310
11311 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11312 {
11313 warning_at (tree_nonartificial_location (exp),
11314 0, "%Kcall to %D will always overflow destination buffer",
11315 exp, get_callee_fndecl (exp));
11316 return NULL_RTX;
11317 }
11318
11319 fn = NULL_TREE;
11320 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11321 mem{cpy,pcpy,move,set} is available. */
11322 switch (fcode)
11323 {
11324 case BUILT_IN_MEMCPY_CHK:
11325 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11326 break;
11327 case BUILT_IN_MEMPCPY_CHK:
11328 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11329 break;
11330 case BUILT_IN_MEMMOVE_CHK:
11331 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11332 break;
11333 case BUILT_IN_MEMSET_CHK:
11334 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11335 break;
11336 default:
11337 break;
11338 }
11339
11340 if (! fn)
11341 return NULL_RTX;
11342
11343 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11344 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11345 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11346 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11347 }
11348 else if (fcode == BUILT_IN_MEMSET_CHK)
11349 return NULL_RTX;
11350 else
11351 {
11352 unsigned int dest_align = get_pointer_alignment (dest);
11353
11354 /* If DEST is not a pointer type, call the normal function. */
11355 if (dest_align == 0)
11356 return NULL_RTX;
11357
11358 /* If SRC and DEST are the same (and not volatile), do nothing. */
11359 if (operand_equal_p (src, dest, 0))
11360 {
11361 tree expr;
11362
11363 if (fcode != BUILT_IN_MEMPCPY_CHK)
11364 {
11365 /* Evaluate and ignore LEN in case it has side-effects. */
11366 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11367 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11368 }
11369
11370 expr = fold_build_pointer_plus (dest, len);
11371 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11372 }
11373
11374 /* __memmove_chk special case. */
11375 if (fcode == BUILT_IN_MEMMOVE_CHK)
11376 {
11377 unsigned int src_align = get_pointer_alignment (src);
11378
11379 if (src_align == 0)
11380 return NULL_RTX;
11381
11382 /* If src is categorized for a readonly section we can use
11383 normal __memcpy_chk. */
11384 if (readonly_data_expr (src))
11385 {
11386 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11387 if (!fn)
11388 return NULL_RTX;
11389 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11390 dest, src, len, size);
11391 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11392 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11393 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11394 }
11395 }
11396 return NULL_RTX;
11397 }
11398 }
11399
11400 /* Emit warning if a buffer overflow is detected at compile time. */
11401
11402 static void
11403 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11404 {
11405 int is_strlen = 0;
11406 tree len, size;
11407 location_t loc = tree_nonartificial_location (exp);
11408
11409 switch (fcode)
11410 {
11411 case BUILT_IN_STRCPY_CHK:
11412 case BUILT_IN_STPCPY_CHK:
11413 /* For __strcat_chk the warning will be emitted only if overflowing
11414 by at least strlen (dest) + 1 bytes. */
11415 case BUILT_IN_STRCAT_CHK:
11416 len = CALL_EXPR_ARG (exp, 1);
11417 size = CALL_EXPR_ARG (exp, 2);
11418 is_strlen = 1;
11419 break;
11420 case BUILT_IN_STRNCAT_CHK:
11421 case BUILT_IN_STRNCPY_CHK:
11422 case BUILT_IN_STPNCPY_CHK:
11423 len = CALL_EXPR_ARG (exp, 2);
11424 size = CALL_EXPR_ARG (exp, 3);
11425 break;
11426 case BUILT_IN_SNPRINTF_CHK:
11427 case BUILT_IN_VSNPRINTF_CHK:
11428 len = CALL_EXPR_ARG (exp, 1);
11429 size = CALL_EXPR_ARG (exp, 3);
11430 break;
11431 default:
11432 gcc_unreachable ();
11433 }
11434
11435 if (!len || !size)
11436 return;
11437
11438 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11439 return;
11440
11441 if (is_strlen)
11442 {
11443 len = c_strlen (len, 1);
11444 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11445 return;
11446 }
11447 else if (fcode == BUILT_IN_STRNCAT_CHK)
11448 {
11449 tree src = CALL_EXPR_ARG (exp, 1);
11450 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11451 return;
11452 src = c_strlen (src, 1);
11453 if (! src || ! tree_fits_uhwi_p (src))
11454 {
11455 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11456 exp, get_callee_fndecl (exp));
11457 return;
11458 }
11459 else if (tree_int_cst_lt (src, size))
11460 return;
11461 }
11462 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11463 return;
11464
11465 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11466 exp, get_callee_fndecl (exp));
11467 }
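
/* For illustration, hypothetical code that triggers the warning above
   (with _FORTIFY_SOURCE-style checking enabled):

     char buf[4];
     __builtin___strcpy_chk (buf, "hello",
			     __builtin_object_size (buf, 0));

   Here strlen ("hello") + 1 > 4, so the destination always
   overflows.  */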
11468
11469 /* Emit warning if a buffer overflow is detected at compile time
11470 in __sprintf_chk/__vsprintf_chk calls. */
11471
11472 static void
11473 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11474 {
11475 tree size, len, fmt;
11476 const char *fmt_str;
11477 int nargs = call_expr_nargs (exp);
11478
11479 /* Verify the required arguments in the original call. */
11480
11481 if (nargs < 4)
11482 return;
11483 size = CALL_EXPR_ARG (exp, 2);
11484 fmt = CALL_EXPR_ARG (exp, 3);
11485
11486 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11487 return;
11488
11489 /* Check whether the format is a literal string constant. */
11490 fmt_str = c_getstr (fmt);
11491 if (fmt_str == NULL)
11492 return;
11493
11494 if (!init_target_chars ())
11495 return;
11496
11497 /* If the format doesn't contain % args or %%, we know its size. */
11498 if (strchr (fmt_str, target_percent) == 0)
11499 len = build_int_cstu (size_type_node, strlen (fmt_str));
11500 /* If the format is "%s" and first ... argument is a string literal,
11501 we know it too. */
11502 else if (fcode == BUILT_IN_SPRINTF_CHK
11503 && strcmp (fmt_str, target_percent_s) == 0)
11504 {
11505 tree arg;
11506
11507 if (nargs < 5)
11508 return;
11509 arg = CALL_EXPR_ARG (exp, 4);
11510 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11511 return;
11512
11513 len = c_strlen (arg, 1);
11514 if (!len || ! tree_fits_uhwi_p (len))
11515 return;
11516 }
11517 else
11518 return;
11519
11520 if (! tree_int_cst_lt (len, size))
11521 warning_at (tree_nonartificial_location (exp),
11522 0, "%Kcall to %D will always overflow destination buffer",
11523 exp, get_callee_fndecl (exp));
11524 }
11525
11526 /* Emit a warning if free is called with the address of a variable.  */
11527
11528 static void
11529 maybe_emit_free_warning (tree exp)
11530 {
11531 tree arg = CALL_EXPR_ARG (exp, 0);
11532
11533 STRIP_NOPS (arg);
11534 if (TREE_CODE (arg) != ADDR_EXPR)
11535 return;
11536
11537 arg = get_base_address (TREE_OPERAND (arg, 0));
11538 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11539 return;
11540
11541 if (SSA_VAR_P (arg))
11542 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11543 "%Kattempt to free a non-heap object %qD", exp, arg);
11544 else
11545 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11546 "%Kattempt to free a non-heap object", exp);
11547 }
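
/* For illustration, hypothetical code that triggers the warning above:

     int x;
     free (&x);		// warning: attempt to free a non-heap object 'x'
*/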
11548
11549 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11550 if possible. */
11551
11552 static tree
11553 fold_builtin_object_size (tree ptr, tree ost)
11554 {
11555 unsigned HOST_WIDE_INT bytes;
11556 int object_size_type;
11557
11558 if (!validate_arg (ptr, POINTER_TYPE)
11559 || !validate_arg (ost, INTEGER_TYPE))
11560 return NULL_TREE;
11561
11562 STRIP_NOPS (ost);
11563
11564 if (TREE_CODE (ost) != INTEGER_CST
11565 || tree_int_cst_sgn (ost) < 0
11566 || compare_tree_int (ost, 3) > 0)
11567 return NULL_TREE;
11568
11569 object_size_type = tree_to_shwi (ost);
11570
11571 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11572 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11573 and (size_t) 0 for types 2 and 3. */
11574 if (TREE_SIDE_EFFECTS (ptr))
11575 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11576
11577 if (TREE_CODE (ptr) == ADDR_EXPR)
11578 {
11579 bytes = compute_builtin_object_size (ptr, object_size_type);
11580 if (wi::fits_to_tree_p (bytes, size_type_node))
11581 return build_int_cstu (size_type_node, bytes);
11582 }
11583 else if (TREE_CODE (ptr) == SSA_NAME)
11584 {
11585 /* If object size is not known yet, delay folding until
11586 later. Maybe subsequent passes will help determining
11587 it. */
11588 bytes = compute_builtin_object_size (ptr, object_size_type);
11589 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11590 && wi::fits_to_tree_p (bytes, size_type_node))
11591 return build_int_cstu (size_type_node, bytes);
11592 }
11593
11594 return NULL_TREE;
11595 }
11596
11597 /* Builtins with folding operations that operate on "..." arguments
11598 need special handling; we need to store the arguments in a convenient
11599 data structure before attempting any folding. Fortunately there are
11600 only a few builtins that fall into this category. FNDECL is the
11601 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11602 result of the function call is ignored. */
11603
11604 static tree
11605 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11606 bool ignore ATTRIBUTE_UNUSED)
11607 {
11608 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11609 tree ret = NULL_TREE;
11610
11611 switch (fcode)
11612 {
11613 case BUILT_IN_FPCLASSIFY:
11614 ret = fold_builtin_fpclassify (loc, exp);
11615 break;
11616
11617 default:
11618 break;
11619 }
11620 if (ret)
11621 {
11622 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11623 SET_EXPR_LOCATION (ret, loc);
11624 TREE_NO_WARNING (ret) = 1;
11625 return ret;
11626 }
11627 return NULL_TREE;
11628 }
11629
11630 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11631 FMT and ARG are the arguments to the call; we don't fold cases with
11632 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11633
11634 Return NULL_TREE if no simplification was possible, otherwise return the
11635 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11636 code of the function to be simplified. */
11637
11638 static tree
11639 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
11640 tree arg, bool ignore,
11641 enum built_in_function fcode)
11642 {
11643 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
11644 const char *fmt_str = NULL;
11645
11646 /* If the return value is used, don't do the transformation. */
11647 if (! ignore)
11648 return NULL_TREE;
11649
11650 /* Verify the required arguments in the original call. */
11651 if (!validate_arg (fmt, POINTER_TYPE))
11652 return NULL_TREE;
11653
11654 /* Check whether the format is a literal string constant. */
11655 fmt_str = c_getstr (fmt);
11656 if (fmt_str == NULL)
11657 return NULL_TREE;
11658
11659 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
11660 {
11661 /* If we're using an unlocked function, assume the other
11662 unlocked functions exist explicitly. */
11663 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
11664 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
11665 }
11666 else
11667 {
11668 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
11669 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
11670 }
11671
11672 if (!init_target_chars ())
11673 return NULL_TREE;
11674
11675 if (strcmp (fmt_str, target_percent_s) == 0
11676 || strchr (fmt_str, target_percent) == NULL)
11677 {
11678 const char *str;
11679
11680 if (strcmp (fmt_str, target_percent_s) == 0)
11681 {
11682 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11683 return NULL_TREE;
11684
11685 if (!arg || !validate_arg (arg, POINTER_TYPE))
11686 return NULL_TREE;
11687
11688 str = c_getstr (arg);
11689 if (str == NULL)
11690 return NULL_TREE;
11691 }
11692 else
11693 {
11694 /* The format specifier doesn't contain any '%' characters. */
11695 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
11696 && arg)
11697 return NULL_TREE;
11698 str = fmt_str;
11699 }
11700
11701 /* If the string was "", printf does nothing. */
11702 if (str[0] == '\0')
11703 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11704
11705 /* If the string has length of 1, call putchar. */
11706 if (str[1] == '\0')
11707 {
11708 	  /* Given printf ("c") (where c is any single character),
11709 	     convert "c"[0] to an int and pass that to the replacement
11710 	     function.  */
11711 newarg = build_int_cst (integer_type_node, str[0]);
11712 if (fn_putchar)
11713 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
11714 }
11715 else
11716 {
11717 /* If the string was "string\n", call puts("string"). */
11718 size_t len = strlen (str);
11719 if ((unsigned char)str[len - 1] == target_newline
11720 && (size_t) (int) len == len
11721 && (int) len > 0)
11722 {
11723 char *newstr;
11724 tree offset_node, string_cst;
11725
11726 /* Create a NUL-terminated string that's one char shorter
11727 than the original, stripping off the trailing '\n'. */
11728 newarg = build_string_literal (len, str);
11729 string_cst = string_constant (newarg, &offset_node);
11730 gcc_checking_assert (string_cst
11731 && (TREE_STRING_LENGTH (string_cst)
11732 == (int) len)
11733 && integer_zerop (offset_node)
11734 && (unsigned char)
11735 TREE_STRING_POINTER (string_cst)[len - 1]
11736 == target_newline);
11737 /* build_string_literal creates a new STRING_CST,
11738 modify it in place to avoid double copying. */
11739 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
11740 newstr[len - 1] = '\0';
11741 if (fn_puts)
11742 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
11743 }
11744 else
11745 /* We'd like to arrange to call fputs(string,stdout) here,
11746 but we need stdout and don't have a way to get it yet. */
11747 return NULL_TREE;
11748 }
11749 }
11750
11751 /* The other optimizations can be done only on the non-va_list variants. */
11752 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
11753 return NULL_TREE;
11754
11755 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11756 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
11757 {
11758 if (!arg || !validate_arg (arg, POINTER_TYPE))
11759 return NULL_TREE;
11760 if (fn_puts)
11761 call = build_call_expr_loc (loc, fn_puts, 1, arg);
11762 }
11763
11764 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11765 else if (strcmp (fmt_str, target_percent_c) == 0)
11766 {
11767 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11768 return NULL_TREE;
11769 if (fn_putchar)
11770 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
11771 }
11772
11773 if (!call)
11774 return NULL_TREE;
11775
11776 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11777 }
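
/* For illustration, the printf folds above in effect (inputs invented,
   return values ignored):

     printf ("")         ->  0 (no call emitted)
     printf ("x")        ->  putchar ('x')
     printf ("hi\n")     ->  puts ("hi")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)

   Each replacement is made only when the corresponding putchar/puts
   decl is available.  */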
11778
11779 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11780 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11781 more than 3 arguments, and ARG may be null in the 2-argument case.
11782
11783 Return NULL_TREE if no simplification was possible, otherwise return the
11784 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11785 code of the function to be simplified. */
11786
11787 static tree
11788 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
11789 tree fmt, tree arg, bool ignore,
11790 enum built_in_function fcode)
11791 {
11792 tree fn_fputc, fn_fputs, call = NULL_TREE;
11793 const char *fmt_str = NULL;
11794
11795 /* If the return value is used, don't do the transformation. */
11796 if (! ignore)
11797 return NULL_TREE;
11798
11799 /* Verify the required arguments in the original call. */
11800 if (!validate_arg (fp, POINTER_TYPE))
11801 return NULL_TREE;
11802 if (!validate_arg (fmt, POINTER_TYPE))
11803 return NULL_TREE;
11804
11805 /* Check whether the format is a literal string constant. */
11806 fmt_str = c_getstr (fmt);
11807 if (fmt_str == NULL)
11808 return NULL_TREE;
11809
11810 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
11811 {
11812 /* If we're using an unlocked function, assume the other
11813 unlocked functions exist explicitly. */
11814 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
11815 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
11816 }
11817 else
11818 {
11819 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
11820 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
11821 }
11822
11823 if (!init_target_chars ())
11824 return NULL_TREE;
11825
11826 /* If the format doesn't contain % args or %%, use strcpy. */
11827 if (strchr (fmt_str, target_percent) == NULL)
11828 {
11829 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
11830 && arg)
11831 return NULL_TREE;
11832
11833 /* If the format specifier was "", fprintf does nothing. */
11834 if (fmt_str[0] == '\0')
11835 {
11836 /* If FP has side-effects, just wait until gimplification is
11837 done. */
11838 if (TREE_SIDE_EFFECTS (fp))
11839 return NULL_TREE;
11840
11841 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
11842 }
11843
11844 /* When "string" doesn't contain %, replace all cases of
11845 fprintf (fp, string) with fputs (string, fp). The fputs
11846 builtin will take care of special cases like length == 1. */
11847 if (fn_fputs)
11848 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
11849 }
11850
11851 /* The other optimizations can be done only on the non-va_list variants. */
11852 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
11853 return NULL_TREE;
11854
11855 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
11856 else if (strcmp (fmt_str, target_percent_s) == 0)
11857 {
11858 if (!arg || !validate_arg (arg, POINTER_TYPE))
11859 return NULL_TREE;
11860 if (fn_fputs)
11861 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
11862 }
11863
11864 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
11865 else if (strcmp (fmt_str, target_percent_c) == 0)
11866 {
11867 if (!arg || !validate_arg (arg, INTEGER_TYPE))
11868 return NULL_TREE;
11869 if (fn_fputc)
11870 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
11871 }
11872
11873 if (!call)
11874 return NULL_TREE;
11875 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
11876 }
11877
11878 /* Initialize format string characters in the target charset. */
11879
11880 bool
11881 init_target_chars (void)
11882 {
11883 static bool init;
11884 if (!init)
11885 {
11886 target_newline = lang_hooks.to_target_charset ('\n');
11887 target_percent = lang_hooks.to_target_charset ('%');
11888 target_c = lang_hooks.to_target_charset ('c');
11889 target_s = lang_hooks.to_target_charset ('s');
11890 if (target_newline == 0 || target_percent == 0 || target_c == 0
11891 || target_s == 0)
11892 return false;
11893
11894 target_percent_c[0] = target_percent;
11895 target_percent_c[1] = target_c;
11896 target_percent_c[2] = '\0';
11897
11898 target_percent_s[0] = target_percent;
11899 target_percent_s[1] = target_s;
11900 target_percent_s[2] = '\0';
11901
11902 target_percent_s_newline[0] = target_percent;
11903 target_percent_s_newline[1] = target_s;
11904 target_percent_s_newline[2] = target_newline;
11905 target_percent_s_newline[3] = '\0';
11906
11907 init = true;
11908 }
11909 return true;
11910 }
11911
11912 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11913 and no overflow/underflow occurred. INEXACT is true if M was not
11914 exactly calculated. TYPE is the tree type for the result. This
11915 function assumes that you cleared the MPFR flags and then
11916 calculated M to see if anything subsequently set a flag prior to
11917 entering this function. Return NULL_TREE if any checks fail. */
11918
11919 static tree
11920 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11921 {
11922 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11923 overflow/underflow occurred. If -frounding-math, proceed iff the
11924 result of calling FUNC was exact. */
11925 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11926 && (!flag_rounding_math || !inexact))
11927 {
11928 REAL_VALUE_TYPE rr;
11929
11930 real_from_mpfr (&rr, m, type, GMP_RNDN);
11931 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11932 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11933 	 but the mpfr_t is not, then we underflowed in the
11934 conversion. */
11935 if (real_isfinite (&rr)
11936 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11937 {
11938 REAL_VALUE_TYPE rmode;
11939
11940 real_convert (&rmode, TYPE_MODE (type), &rr);
11941 /* Proceed iff the specified mode can hold the value. */
11942 if (real_identical (&rmode, &rr))
11943 return build_real (type, rmode);
11944 }
11945 }
11946 return NULL_TREE;
11947 }
11948
11949 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11950 number and no overflow/underflow occurred. INEXACT is true if M
11951 was not exactly calculated. TYPE is the tree type for the result.
11952 This function assumes that you cleared the MPFR flags and then
11953 calculated M to see if anything subsequently set a flag prior to
11954    entering this function.  Return NULL_TREE if any checks fail;
11955    if FORCE_CONVERT is true, bypass the checks.  */
11956
11957 static tree
11958 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11959 {
11960 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11961 overflow/underflow occurred. If -frounding-math, proceed iff the
11962 result of calling FUNC was exact. */
11963 if (force_convert
11964 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11965 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11966 && (!flag_rounding_math || !inexact)))
11967 {
11968 REAL_VALUE_TYPE re, im;
11969
11970 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11971 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11972 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11973 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11974 	 but the mpfr_t is not, then we underflowed in the
11975 conversion. */
11976 if (force_convert
11977 || (real_isfinite (&re) && real_isfinite (&im)
11978 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11979 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11980 {
11981 REAL_VALUE_TYPE re_mode, im_mode;
11982
11983 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11984 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11985 /* Proceed iff the specified mode can hold the value. */
11986 if (force_convert
11987 || (real_identical (&re_mode, &re)
11988 && real_identical (&im_mode, &im)))
11989 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11990 build_real (TREE_TYPE (type), im_mode));
11991 }
11992 }
11993 return NULL_TREE;
11994 }
11995
11996 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11997 FUNC on it and return the resulting value as a tree with type TYPE.
11998 If MIN and/or MAX are not NULL, then the supplied ARG must be
11999 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12000 acceptable values, otherwise they are not. The mpfr precision is
12001 set to the precision of TYPE. We assume that function FUNC returns
12002 zero if the result could be calculated exactly within the requested
12003 precision. */
12004
12005 static tree
12006 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12007 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12008 bool inclusive)
12009 {
12010 tree result = NULL_TREE;
12011
12012 STRIP_NOPS (arg);
12013
12014 /* To proceed, MPFR must exactly represent the target floating point
12015 format, which only happens when the target base equals two. */
12016 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12017 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12018 {
12019 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12020
12021 if (real_isfinite (ra)
12022 	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12023 	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12024 {
12025 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12026 const int prec = fmt->p;
12027 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12028 int inexact;
12029 mpfr_t m;
12030
12031 mpfr_init2 (m, prec);
12032 mpfr_from_real (m, ra, GMP_RNDN);
12033 mpfr_clear_flags ();
12034 inexact = func (m, m, rnd);
12035 result = do_mpfr_ckconv (m, type, inexact);
12036 mpfr_clear (m);
12037 }
12038 }
12039
12040 return result;
12041 }
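
/* For illustration (hypothetical use in a caller): constant folding of
   math builtins funnels through here, e.g.

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false)

   folds __builtin_sin (1.0) to the REAL_CST nearest sin (1.0) in
   TYPE's format; the do_mpfr_arg2/do_mpfr_arg3 variants below work the
   same way for two and three arguments.  */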
12042
12043 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12044 FUNC on it and return the resulting value as a tree with type TYPE.
12045 The mpfr precision is set to the precision of TYPE. We assume that
12046 function FUNC returns zero if the result could be calculated
12047 exactly within the requested precision. */
12048
12049 static tree
12050 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12051 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12052 {
12053 tree result = NULL_TREE;
12054
12055 STRIP_NOPS (arg1);
12056 STRIP_NOPS (arg2);
12057
12058 /* To proceed, MPFR must exactly represent the target floating point
12059 format, which only happens when the target base equals two. */
12060 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12061 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12062 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12063 {
12064 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12065 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12066
12067 if (real_isfinite (ra1) && real_isfinite (ra2))
12068 {
12069 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12070 const int prec = fmt->p;
12071 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12072 int inexact;
12073 mpfr_t m1, m2;
12074
12075 mpfr_inits2 (prec, m1, m2, NULL);
12076 mpfr_from_real (m1, ra1, GMP_RNDN);
12077 mpfr_from_real (m2, ra2, GMP_RNDN);
12078 mpfr_clear_flags ();
12079 inexact = func (m1, m1, m2, rnd);
12080 result = do_mpfr_ckconv (m1, type, inexact);
12081 mpfr_clears (m1, m2, NULL);
12082 }
12083 }
12084
12085 return result;
12086 }
12087
12088 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12089 FUNC on it and return the resulting value as a tree with type TYPE.
12090 The mpfr precision is set to the precision of TYPE. We assume that
12091 function FUNC returns zero if the result could be calculated
12092 exactly within the requested precision. */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
                          mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}

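/* A hedged usage sketch; fma is the canonical three-argument client:

     CASE_FLT_FN (BUILT_IN_FMA):
       return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);  */
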
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return the result as a complex value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}

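/* A hedged usage sketch: cexpi(x) requests the complex-valued return
   by passing NULL pointers, while sincos(x, &s, &c) forwards its two
   pointer arguments (the argument names are illustrative):

     return do_mpfr_sincos (arg0, NULL, NULL);   // cexpi
     return do_mpfr_sincos (arg0, arg1, arg2);   // sincos  */
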
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}

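/* A hedged usage sketch: jn accepts any finite argument, while yn is
   restricted to strictly positive arguments via MIN = &dconst0 with
   INCLUSIVE false:

     CASE_FLT_FN (BUILT_IN_JN):
       return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
     CASE_FLT_FN (BUILT_IN_YN):
       return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);  */
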
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

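/* A hedged usage sketch for folding remquo (x, y, &quo), where ARG2
   is the int pointer that the routine above validates:

     return do_mpfr_remquo (arg0, arg1, arg2);  */
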
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

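/* A hedged usage sketch for folding lgamma_r (x, &signgam), where
   ARG1 is the int pointer checked by the routine above:

     return do_mpfr_lgamma_r (arg0, arg1, type);  */
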
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}

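/* A hedged usage sketch; the complex trigonometric builtins are
   typical clients:

     CASE_FLT_FN (BUILT_IN_CCOS):
       return do_mpc_arg1 (arg0, type, mpc_cos);  */
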
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

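/* A hedged usage sketch: cpow may produce Inf/NaN even for finite
   operands, so its folding is gated on -funsafe-math-optimizations
   via the DO_NONFINITE flag:

     CASE_FLT_FN (BUILT_IN_CPOW):
       return do_mpc_arg2 (arg0, arg1, type,
                           flag_unsafe_math_optimizations,  // do_nonfinite
                           mpc_pow);  */
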
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

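/* For reference, a user-code sketch (not part of GCC itself) of what
   drives the function above: an asm label on a builtin's declaration,
   e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   Here ASMSPEC is "my_memcpy", so both the block-move expander and
   memcpy_libfunc are redirected to that symbol.  */
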
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e. one that
   is most probably expanded inline into reasonably simple code.  This
   is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}