/* Expand builtin functions.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes. */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE. */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away. */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
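
/* For example:

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false

   Only the prefix is inspected, so any identifier spelled with one of
   these reserved prefixes is accepted. */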


/* Return true if DECL is a function symbol representing a built-in. */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present in the runtime,
   but sincos is not. */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
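
/* These predicates are normally reached through the target hook rather
   than called directly; roughly (a sketch):

     if (targetm.libc_has_function (function_c99_math_complex))
       ... folding cexpi and friends is safe ...

   A target whose runtime predates C99 installs no_c99_libc_has_function
   as its hook instead of the default. */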

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level. This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin". */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have. */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M. If these numbers can be determined, store M in *ALIGNP and N in
   *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
   *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located. These two
   addresses are not always the same. For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place. */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access. */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset. */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned. */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer. */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account. */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation. */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly. */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL. */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail. */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment. */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment. */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }
  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts. */
  align = MIN (align, inner);

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
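
/* A worked example: if the address of EXP is known to have the form
   16*k + 4 in bytes, then on an 8-bit-unit target this stores
   *ALIGNP = 16 * BITS_PER_UNIT = 128 and *BITPOSP = 4 * BITS_PER_UNIT = 32;
   indeed 128 divides (address-in-bits - 32) and 32 < 128. */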

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M. If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object. */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos. */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
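
/* Continuing the example above: with align == 128 and bitpos == 32,
   this returns 32 == (32 & -32), the largest power of two guaranteed
   to divide the object's address in bits. */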

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M. If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true. Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too. */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation. */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to. If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter. */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos. */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here. */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it. */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes. We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets. Subtract the offset from the length of the string,
	 and return that. This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants. */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string. Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT. */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime. */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings. */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte. Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed. */
  return ssize_int (strlen (ptr + offset));
}
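
/* Examples of the cases above, for SRC built from the literal "foo\0bar"
   (TREE_STRING_LENGTH 8, so MAX is 7):

     offset 0 (or no offset)  returns ssize_int (3)
     offset 5                 returns ssize_int (2), i.e. strlen ("ar")
     non-constant offset      returns NULL_TREE, because the internal zero
                              byte makes the length depend on the offset. */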

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant. */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR. */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
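
/* For instance, c_readstr ("abcd", SImode) with 8-bit units yields the
   CONST_INT 0x64636261 on a little-endian target and 0x61626364 on a
   big-endian one: each byte lands where the target itself would read
   it from memory. */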

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P. */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
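
/* For example, with CST == build_int_cst (char_type_node, 'A') this
   stores 65 through P and returns zero. It returns nonzero only when
   CST is not an INTEGER_CST, or when the target character is wider
   than the host's and the value does not survive the truncation. */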

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations. In particular, we assume that a
   non-addressable local variable will not be modified. */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE). */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us. Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination. */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer. */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames. For example, on the SPARC, we must first flush
     all register windows to the stack. */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register. There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area. */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame. */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified. */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got. But, on
     the SPARC for example, we may have to add a bias. */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame. */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
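
/* At the source level this expander implements, for example,

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   where the argument is the constant COUNT; a nonzero COUNT walks the
   dynamic chain in the loop above. */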

/* Alias set used for setjmp buffer. */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call. Control will
   return to RECEIVER_LABEL. This is also called directly by the SJLJ
   exception handling code. */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent. */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it. */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label. */
  cfun->has_nonlocal_label = 1;
}
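
/* The resulting buffer layout, in Pmode-sized words, is:

     word 0    the frame pointer (targetm.builtin_setjmp_frame_value ())
     word 1    the address of RECEIVER_LABEL
     word 2+   the machine-dependent stack save area, in SA_MODE

   expand_builtin_longjmp below reads the words back in the same order. */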

/* Construct the trailing part of a __builtin_setjmp call. This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function. */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it. */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed. */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value. It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET. */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it. We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines. */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame. */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling. Specifically, the update of the frame pointer must
     happen immediately, not later. Similarly, we must block
     (frame-related) register values to be used across this code. */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines). It operates similarly to the C
   library function of the same name, but is more efficient. Much of
   the code below is copied from the handling of non-local gotos. */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return. */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump. This code is
	 from expand_goto in stmt.c; see there for detailed comments. */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it. */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function. However, we've
     already cautioned the user that these functions are for
     internal exception handling use only. */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
   and the address of the save area. */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer. */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik. */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function. */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed. */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump. (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way. Note also that this is
	 a no-op if the GP register is a global invariant.) */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto. */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer. */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch. For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects. */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality). */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address. */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int. */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one. */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int. */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3. */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects. */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
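
/* At the source level the operands correspond to, for example,

     __builtin_prefetch (p);            read prefetch, locality 3
     __builtin_prefetch (p, 1, 0);      write prefetch, no temporal locality

   Out-of-range constant arguments are diagnosed above and silently
   replaced by zero rather than rejected. */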

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown. */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a not-yet-resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded. */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops. */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything). */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one. */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return. */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode. */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change. */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer. */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument. */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
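
/* A small worked example of the size computation (a sketch; the actual
   numbers are target-specific): with a 4-byte Pmode, no structure value
   register, and two 8-byte argument registers requiring 8-byte alignment,
   SIZE evolves as 4 (arg-pointer), then 8 (rounded up), 16, and finally
   24 bytes for the whole block. */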

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode. */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change. */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here. */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT. If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values. */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function. */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved. */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address. */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block. */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block. */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed. Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS. */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument. */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block. */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call. The code is
   moved to the start of the function so the incoming values are
   saved. */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it. */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function. So we migrate the
       call to the first insn of this function. */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function. If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo. */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
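
/* At the source level, the builtin expanded here is used together with
   __builtin_apply and __builtin_return, along these lines (a sketch;
   the size argument must cover the callee's register arguments):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) fn, args, 64);
     __builtin_return (result);

   which forwards the current function's arguments to FN and returns
   whatever FN returned. */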
1544
1545 /* Perform an untyped call and save the state required to perform an
1546 untyped return of whatever value was returned by the given function. */
1547
1548 static rtx
1549 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1550 {
1551 int size, align, regno;
1552 enum machine_mode mode;
1553 rtx incoming_args, result, reg, dest, src, call_insn;
1554 rtx old_stack_level = 0;
1555 rtx call_fusage = 0;
1556 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1557
1558 arguments = convert_memory_address (Pmode, arguments);
1559
1560 /* Create a block where the return registers can be saved. */
1561 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1562
1563 /* Fetch the arg pointer from the ARGUMENTS block. */
1564 incoming_args = gen_reg_rtx (Pmode);
1565 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1566 #ifndef STACK_GROWS_DOWNWARD
1567 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1568 incoming_args, 0, OPTAB_LIB_WIDEN);
1569 #endif
1570
1571 /* Push a new argument block and copy the arguments. Do not allow
1572 the (potential) memcpy call below to interfere with our stack
1573 manipulations. */
1574 do_pending_stack_adjust ();
1575 NO_DEFER_POP;
1576
1577 /* Save the stack with nonlocal if available. */
1578 #ifdef HAVE_save_stack_nonlocal
1579 if (HAVE_save_stack_nonlocal)
1580 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1581 else
1582 #endif
1583 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1584
1585 /* Allocate a block of memory onto the stack and copy the memory
1586 arguments to the outgoing arguments address. We can pass TRUE
1587 as the 4th argument because we just saved the stack pointer
1588 and will restore it right after the call. */
1589 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1590
1591 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1592 may have already set current_function_calls_alloca to true.
1593 current_function_calls_alloca won't be set if argsize is zero,
1594 so we have to guarantee need_drap is true here. */
1595 if (SUPPORTS_STACK_ALIGNMENT)
1596 crtl->need_drap = true;
1597
1598 dest = virtual_outgoing_args_rtx;
1599 #ifndef STACK_GROWS_DOWNWARD
1600 if (CONST_INT_P (argsize))
1601 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1602 else
1603 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1604 #endif
1605 dest = gen_rtx_MEM (BLKmode, dest);
1606 set_mem_align (dest, PARM_BOUNDARY);
1607 src = gen_rtx_MEM (BLKmode, incoming_args);
1608 set_mem_align (src, PARM_BOUNDARY);
1609 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1610
1611 /* Refer to the argument block. */
1612 apply_args_size ();
1613 arguments = gen_rtx_MEM (BLKmode, arguments);
1614 set_mem_align (arguments, PARM_BOUNDARY);
1615
1616 /* Walk past the arg-pointer and structure value address. */
1617 size = GET_MODE_SIZE (Pmode);
1618 if (struct_value)
1619 size += GET_MODE_SIZE (Pmode);
1620
1621 /* Restore each of the registers previously saved. Make USE insns
1622 for each of these registers for use in making the call. */
1623 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1624 if ((mode = apply_args_mode[regno]) != VOIDmode)
1625 {
1626 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1627 if (size % align != 0)
1628 size = CEIL (size, align) * align;
1629 reg = gen_rtx_REG (mode, regno);
1630 emit_move_insn (reg, adjust_address (arguments, mode, size));
1631 use_reg (&call_fusage, reg);
1632 size += GET_MODE_SIZE (mode);
1633 }
1634
1635 /* Restore the structure value address unless this is passed as an
1636 "invisible" first argument. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 {
1640 rtx value = gen_reg_rtx (Pmode);
1641 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1642 emit_move_insn (struct_value, value);
1643 if (REG_P (struct_value))
1644 use_reg (&call_fusage, struct_value);
1645 size += GET_MODE_SIZE (Pmode);
1646 }
1647
1648 /* All arguments and registers used for the call are set up by now! */
1649 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1650
1651 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1652 work is needed; we also don't load it into a register as an
1653 optimization, because prepare_call_address already did that if useful. */
1654 if (GET_CODE (function) != SYMBOL_REF)
1655 function = memory_address (FUNCTION_MODE, function);
1656
1657 /* Generate the actual call instruction and save the return value. */
1658 #ifdef HAVE_untyped_call
1659 if (HAVE_untyped_call)
1660 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1661 result, result_vector (1, result)));
1662 else
1663 #endif
1664 #ifdef HAVE_call_value
1665 if (HAVE_call_value)
1666 {
1667 rtx valreg = 0;
1668
1669 /* Locate the unique return register. It is not possible to
1670 express a call that sets more than one return register using
1671 call_value; use untyped_call for that. In fact, untyped_call
1672 only needs to save the return registers in the given block. */
1673 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1674 if ((mode = apply_result_mode[regno]) != VOIDmode)
1675 {
1676 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1677
1678 valreg = gen_rtx_REG (mode, regno);
1679 }
1680
1681 emit_call_insn (GEN_CALL_VALUE (valreg,
1682 gen_rtx_MEM (FUNCTION_MODE, function),
1683 const0_rtx, NULL_RTX, const0_rtx));
1684
1685 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1686 }
1687 else
1688 #endif
1689 gcc_unreachable ();
1690
1691 /* Find the CALL insn we just emitted, and attach the register usage
1692 information. */
1693 call_insn = last_call_insn ();
1694 add_function_usage_to (call_insn, call_fusage);
1695
1696 /* Restore the stack. */
1697 #ifdef HAVE_save_stack_nonlocal
1698 if (HAVE_save_stack_nonlocal)
1699 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1700 else
1701 #endif
1702 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1703 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1704
1705 OK_DEFER_POP;
1706
1707 /* Return the address of the result block. */
1708 result = copy_addr_to_reg (XEXP (result, 0));
1709 return convert_memory_address (ptr_mode, result);
1710 }
1711
1712 /* Perform an untyped return. */
1713
1714 static void
1715 expand_builtin_return (rtx result)
1716 {
1717 int size, align, regno;
1718 enum machine_mode mode;
1719 rtx reg;
1720 rtx call_fusage = 0;
1721
1722 result = convert_memory_address (Pmode, result);
1723
1724 apply_result_size ();
1725 result = gen_rtx_MEM (BLKmode, result);
1726
1727 #ifdef HAVE_untyped_return
1728 if (HAVE_untyped_return)
1729 {
1730 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1731 emit_barrier ();
1732 return;
1733 }
1734 #endif
1735
1736 /* Restore the return value and note that each value is used. */
1737 size = 0;
1738 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1739 if ((mode = apply_result_mode[regno]) != VOIDmode)
1740 {
1741 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1742 if (size % align != 0)
1743 size = CEIL (size, align) * align;
1744 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1745 emit_move_insn (reg, adjust_address (result, mode, size));
1746
1747 push_to_sequence (call_fusage);
1748 emit_use (reg);
1749 call_fusage = get_insns ();
1750 end_sequence ();
1751 size += GET_MODE_SIZE (mode);
1752 }
1753
1754 /* Put the USE insns before the return. */
1755 emit_insn (call_fusage);
1756
1757 /* Return whatever values were restored by jumping directly to the end
1758 of the function. */
1759 expand_naked_return ();
1760 }
1761
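/* Illustrative usage (not part of this file): the three builtins above
   are designed to be used together to forward a call whose arguments
   are unknown at compile time, roughly

     void *forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 128);
       __builtin_return (ret);
     }

   where target_fn and the 128-byte argument-block size are placeholders
   a real caller must supply.  */
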
1762 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1763
1764 static enum type_class
1765 type_to_class (tree type)
1766 {
1767 switch (TREE_CODE (type))
1768 {
1769 case VOID_TYPE: return void_type_class;
1770 case INTEGER_TYPE: return integer_type_class;
1771 case ENUMERAL_TYPE: return enumeral_type_class;
1772 case BOOLEAN_TYPE: return boolean_type_class;
1773 case POINTER_TYPE: return pointer_type_class;
1774 case REFERENCE_TYPE: return reference_type_class;
1775 case OFFSET_TYPE: return offset_type_class;
1776 case REAL_TYPE: return real_type_class;
1777 case COMPLEX_TYPE: return complex_type_class;
1778 case FUNCTION_TYPE: return function_type_class;
1779 case METHOD_TYPE: return method_type_class;
1780 case RECORD_TYPE: return record_type_class;
1781 case UNION_TYPE:
1782 case QUAL_UNION_TYPE: return union_type_class;
1783 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1784 ? string_type_class : array_type_class);
1785 case LANG_TYPE: return lang_type_class;
1786 default: return no_type_class;
1787 }
1788 }
1789
1790 /* Expand a call EXP to __builtin_classify_type. */
1791
1792 static rtx
1793 expand_builtin_classify_type (tree exp)
1794 {
1795 if (call_expr_nargs (exp))
1796 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1797 return GEN_INT (no_type_class);
1798 }
1799
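/* Illustrative results, following the mapping in type_to_class: in C,
   __builtin_classify_type (1) evaluates to integer_type_class and
   __builtin_classify_type (1.0) to real_type_class.  */
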
1800 /* This helper macro, meant to be used in mathfn_built_in below,
1801 determines which among a set of three builtin math functions is
1802 appropriate for a given type mode. The `F' and `L' cases are
1803 automatically generated from the `double' case. */
1804 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1805 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1806 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1807 fcodel = BUILT_IN_MATHFN##L ; break;
1808 /* Similar to above, but appends _R after any F/L suffix. */
1809 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1810 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1811 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1812 fcodel = BUILT_IN_MATHFN##L_R ; break;
1813
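/* As a concrete example, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   recording the double, float and long double variants of one
   function family.  */
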
1814 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1815 if available. If IMPLICIT is true use the implicit builtin declaration,
1816 otherwise use the explicit declaration. If we can't do the conversion,
1817 return zero. */
1818
1819 static tree
1820 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1821 {
1822 enum built_in_function fcode, fcodef, fcodel, fcode2;
1823
1824 switch (fn)
1825 {
1826 CASE_MATHFN (BUILT_IN_ACOS)
1827 CASE_MATHFN (BUILT_IN_ACOSH)
1828 CASE_MATHFN (BUILT_IN_ASIN)
1829 CASE_MATHFN (BUILT_IN_ASINH)
1830 CASE_MATHFN (BUILT_IN_ATAN)
1831 CASE_MATHFN (BUILT_IN_ATAN2)
1832 CASE_MATHFN (BUILT_IN_ATANH)
1833 CASE_MATHFN (BUILT_IN_CBRT)
1834 CASE_MATHFN (BUILT_IN_CEIL)
1835 CASE_MATHFN (BUILT_IN_CEXPI)
1836 CASE_MATHFN (BUILT_IN_COPYSIGN)
1837 CASE_MATHFN (BUILT_IN_COS)
1838 CASE_MATHFN (BUILT_IN_COSH)
1839 CASE_MATHFN (BUILT_IN_DREM)
1840 CASE_MATHFN (BUILT_IN_ERF)
1841 CASE_MATHFN (BUILT_IN_ERFC)
1842 CASE_MATHFN (BUILT_IN_EXP)
1843 CASE_MATHFN (BUILT_IN_EXP10)
1844 CASE_MATHFN (BUILT_IN_EXP2)
1845 CASE_MATHFN (BUILT_IN_EXPM1)
1846 CASE_MATHFN (BUILT_IN_FABS)
1847 CASE_MATHFN (BUILT_IN_FDIM)
1848 CASE_MATHFN (BUILT_IN_FLOOR)
1849 CASE_MATHFN (BUILT_IN_FMA)
1850 CASE_MATHFN (BUILT_IN_FMAX)
1851 CASE_MATHFN (BUILT_IN_FMIN)
1852 CASE_MATHFN (BUILT_IN_FMOD)
1853 CASE_MATHFN (BUILT_IN_FREXP)
1854 CASE_MATHFN (BUILT_IN_GAMMA)
1855 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1856 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1857 CASE_MATHFN (BUILT_IN_HYPOT)
1858 CASE_MATHFN (BUILT_IN_ILOGB)
1859 CASE_MATHFN (BUILT_IN_ICEIL)
1860 CASE_MATHFN (BUILT_IN_IFLOOR)
1861 CASE_MATHFN (BUILT_IN_INF)
1862 CASE_MATHFN (BUILT_IN_IRINT)
1863 CASE_MATHFN (BUILT_IN_IROUND)
1864 CASE_MATHFN (BUILT_IN_ISINF)
1865 CASE_MATHFN (BUILT_IN_J0)
1866 CASE_MATHFN (BUILT_IN_J1)
1867 CASE_MATHFN (BUILT_IN_JN)
1868 CASE_MATHFN (BUILT_IN_LCEIL)
1869 CASE_MATHFN (BUILT_IN_LDEXP)
1870 CASE_MATHFN (BUILT_IN_LFLOOR)
1871 CASE_MATHFN (BUILT_IN_LGAMMA)
1872 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1873 CASE_MATHFN (BUILT_IN_LLCEIL)
1874 CASE_MATHFN (BUILT_IN_LLFLOOR)
1875 CASE_MATHFN (BUILT_IN_LLRINT)
1876 CASE_MATHFN (BUILT_IN_LLROUND)
1877 CASE_MATHFN (BUILT_IN_LOG)
1878 CASE_MATHFN (BUILT_IN_LOG10)
1879 CASE_MATHFN (BUILT_IN_LOG1P)
1880 CASE_MATHFN (BUILT_IN_LOG2)
1881 CASE_MATHFN (BUILT_IN_LOGB)
1882 CASE_MATHFN (BUILT_IN_LRINT)
1883 CASE_MATHFN (BUILT_IN_LROUND)
1884 CASE_MATHFN (BUILT_IN_MODF)
1885 CASE_MATHFN (BUILT_IN_NAN)
1886 CASE_MATHFN (BUILT_IN_NANS)
1887 CASE_MATHFN (BUILT_IN_NEARBYINT)
1888 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1889 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1890 CASE_MATHFN (BUILT_IN_POW)
1891 CASE_MATHFN (BUILT_IN_POWI)
1892 CASE_MATHFN (BUILT_IN_POW10)
1893 CASE_MATHFN (BUILT_IN_REMAINDER)
1894 CASE_MATHFN (BUILT_IN_REMQUO)
1895 CASE_MATHFN (BUILT_IN_RINT)
1896 CASE_MATHFN (BUILT_IN_ROUND)
1897 CASE_MATHFN (BUILT_IN_SCALB)
1898 CASE_MATHFN (BUILT_IN_SCALBLN)
1899 CASE_MATHFN (BUILT_IN_SCALBN)
1900 CASE_MATHFN (BUILT_IN_SIGNBIT)
1901 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1902 CASE_MATHFN (BUILT_IN_SIN)
1903 CASE_MATHFN (BUILT_IN_SINCOS)
1904 CASE_MATHFN (BUILT_IN_SINH)
1905 CASE_MATHFN (BUILT_IN_SQRT)
1906 CASE_MATHFN (BUILT_IN_TAN)
1907 CASE_MATHFN (BUILT_IN_TANH)
1908 CASE_MATHFN (BUILT_IN_TGAMMA)
1909 CASE_MATHFN (BUILT_IN_TRUNC)
1910 CASE_MATHFN (BUILT_IN_Y0)
1911 CASE_MATHFN (BUILT_IN_Y1)
1912 CASE_MATHFN (BUILT_IN_YN)
1913
1914 default:
1915 return NULL_TREE;
1916 }
1917
1918 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1919 fcode2 = fcode;
1920 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1921 fcode2 = fcodef;
1922 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1923 fcode2 = fcodel;
1924 else
1925 return NULL_TREE;
1926
1927 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1928 return NULL_TREE;
1929
1930 return builtin_decl_explicit (fcode2);
1931 }
1932
1933 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1934
1935 tree
1936 mathfn_built_in (tree type, enum built_in_function fn)
1937 {
1938 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1939 }
1940
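/* Usage sketch (hypothetical caller): retargeting a math call to a
   narrower type.

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the BUILT_IN_SINF decl when its implicit declaration is
   available, and NULL_TREE otherwise.  */
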
1941 /* If errno must be maintained, expand the RTL to check if the result,
1942 TARGET, of a built-in function call, EXP, is NaN, and if so set
1943 errno to EDOM. */
1944
1945 static void
1946 expand_errno_check (tree exp, rtx target)
1947 {
1948 rtx lab = gen_label_rtx ();
1949
1950 /* Test the result; if it is NaN, set errno=EDOM because
1951 the argument was not in the domain. */
1952 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1953 NULL_RTX, NULL_RTX, lab,
1954 /* The jump is very likely. */
1955 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1956
1957 #ifdef TARGET_EDOM
1958 /* If this built-in doesn't throw an exception, set errno directly. */
1959 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1960 {
1961 #ifdef GEN_ERRNO_RTX
1962 rtx errno_rtx = GEN_ERRNO_RTX;
1963 #else
1964 rtx errno_rtx
1965 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1966 #endif
1967 emit_move_insn (errno_rtx,
1968 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1969 emit_label (lab);
1970 return;
1971 }
1972 #endif
1973
1974 /* Make sure the library call isn't expanded as a tail call. */
1975 CALL_EXPR_TAILCALL (exp) = 0;
1976
1977 /* We can't set errno=EDOM directly; let the library call do it.
1978 Pop the arguments right away in case the call gets deleted. */
1979 NO_DEFER_POP;
1980 expand_call (exp, target, 0);
1981 OK_DEFER_POP;
1982 emit_label (lab);
1983 }
1984
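/* The comparison above exploits the IEEE rule that only a NaN compares
   unequal to itself, so the emitted RTL behaves like

     if (result != result)
       errno = EDOM;

   with the store to errno done inline when TARGET_EDOM is defined and
   left to the library call otherwise.  */
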
1985 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1986 Return NULL_RTX if a normal call should be emitted rather than expanding
1987 the function in-line. EXP is the expression that is a call to the builtin
1988 function; if convenient, the result should be placed in TARGET.
1989 SUBTARGET may be used as the target for computing one of EXP's operands. */
1990
1991 static rtx
1992 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1993 {
1994 optab builtin_optab;
1995 rtx op0, insns;
1996 tree fndecl = get_callee_fndecl (exp);
1997 enum machine_mode mode;
1998 bool errno_set = false;
1999 bool try_widening = false;
2000 tree arg;
2001
2002 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2003 return NULL_RTX;
2004
2005 arg = CALL_EXPR_ARG (exp, 0);
2006
2007 switch (DECL_FUNCTION_CODE (fndecl))
2008 {
2009 CASE_FLT_FN (BUILT_IN_SQRT):
2010 errno_set = ! tree_expr_nonnegative_p (arg);
2011 try_widening = true;
2012 builtin_optab = sqrt_optab;
2013 break;
2014 CASE_FLT_FN (BUILT_IN_EXP):
2015 errno_set = true; builtin_optab = exp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_EXP10):
2017 CASE_FLT_FN (BUILT_IN_POW10):
2018 errno_set = true; builtin_optab = exp10_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP2):
2020 errno_set = true; builtin_optab = exp2_optab; break;
2021 CASE_FLT_FN (BUILT_IN_EXPM1):
2022 errno_set = true; builtin_optab = expm1_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOGB):
2024 errno_set = true; builtin_optab = logb_optab; break;
2025 CASE_FLT_FN (BUILT_IN_LOG):
2026 errno_set = true; builtin_optab = log_optab; break;
2027 CASE_FLT_FN (BUILT_IN_LOG10):
2028 errno_set = true; builtin_optab = log10_optab; break;
2029 CASE_FLT_FN (BUILT_IN_LOG2):
2030 errno_set = true; builtin_optab = log2_optab; break;
2031 CASE_FLT_FN (BUILT_IN_LOG1P):
2032 errno_set = true; builtin_optab = log1p_optab; break;
2033 CASE_FLT_FN (BUILT_IN_ASIN):
2034 builtin_optab = asin_optab; break;
2035 CASE_FLT_FN (BUILT_IN_ACOS):
2036 builtin_optab = acos_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TAN):
2038 builtin_optab = tan_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ATAN):
2040 builtin_optab = atan_optab; break;
2041 CASE_FLT_FN (BUILT_IN_FLOOR):
2042 builtin_optab = floor_optab; break;
2043 CASE_FLT_FN (BUILT_IN_CEIL):
2044 builtin_optab = ceil_optab; break;
2045 CASE_FLT_FN (BUILT_IN_TRUNC):
2046 builtin_optab = btrunc_optab; break;
2047 CASE_FLT_FN (BUILT_IN_ROUND):
2048 builtin_optab = round_optab; break;
2049 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2050 builtin_optab = nearbyint_optab;
2051 if (flag_trapping_math)
2052 break;
2053 /* Else fall through and expand as rint. */
2054 CASE_FLT_FN (BUILT_IN_RINT):
2055 builtin_optab = rint_optab; break;
2056 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2057 builtin_optab = significand_optab; break;
2058 default:
2059 gcc_unreachable ();
2060 }
2061
2062 /* Make a suitable register to place result in. */
2063 mode = TYPE_MODE (TREE_TYPE (exp));
2064
2065 if (! flag_errno_math || ! HONOR_NANS (mode))
2066 errno_set = false;
2067
2068 /* Before working hard, check whether the instruction is available, but try
2069 to widen the mode for specific operations. */
2070 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2071 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2072 && (!errno_set || !optimize_insn_for_size_p ()))
2073 {
2074 rtx result = gen_reg_rtx (mode);
2075
2076 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2077 need to expand the argument again. This way, we will not perform
2078 side-effects more than once. */
2079 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2080
2081 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2082
2083 start_sequence ();
2084
2085 /* Compute into RESULT.
2086 Set RESULT to wherever the result comes back. */
2087 result = expand_unop (mode, builtin_optab, op0, result, 0);
2088
2089 if (result != 0)
2090 {
2091 if (errno_set)
2092 expand_errno_check (exp, result);
2093
2094 /* Output the entire sequence. */
2095 insns = get_insns ();
2096 end_sequence ();
2097 emit_insn (insns);
2098 return result;
2099 }
2100
2101 /* If we were unable to expand via the builtin, stop the sequence
2102 (without outputting the insns) and emit a call to the library function
2103 with the stabilized argument list. */
2104 end_sequence ();
2105 }
2106
2107 return expand_call (exp, target, target == const0_rtx);
2108 }
2109
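/* Sketch of the net effect, assuming the target implements sqrt_optab
   and -fno-math-errno is in force: a call such as

     double d = __builtin_sqrt (x);

   expands to a single square-root insn with no library call; when
   errno is honored, the expand_errno_check sequence is appended.  */
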
2110 /* Expand a call to the builtin binary math functions (pow and atan2).
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2116
2117 static rtx
2118 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2119 {
2120 optab builtin_optab;
2121 rtx op0, op1, insns, result;
2122 int op1_type = REAL_TYPE;
2123 tree fndecl = get_callee_fndecl (exp);
2124 tree arg0, arg1;
2125 enum machine_mode mode;
2126 bool errno_set = true;
2127
2128 switch (DECL_FUNCTION_CODE (fndecl))
2129 {
2130 CASE_FLT_FN (BUILT_IN_SCALBN):
2131 CASE_FLT_FN (BUILT_IN_SCALBLN):
2132 CASE_FLT_FN (BUILT_IN_LDEXP):
2133 op1_type = INTEGER_TYPE;
2134 default:
2135 break;
2136 }
2137
2138 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2139 return NULL_RTX;
2140
2141 arg0 = CALL_EXPR_ARG (exp, 0);
2142 arg1 = CALL_EXPR_ARG (exp, 1);
2143
2144 switch (DECL_FUNCTION_CODE (fndecl))
2145 {
2146 CASE_FLT_FN (BUILT_IN_POW):
2147 builtin_optab = pow_optab; break;
2148 CASE_FLT_FN (BUILT_IN_ATAN2):
2149 builtin_optab = atan2_optab; break;
2150 CASE_FLT_FN (BUILT_IN_SCALB):
2151 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2152 return 0;
2153 builtin_optab = scalb_optab; break;
2154 CASE_FLT_FN (BUILT_IN_SCALBN):
2155 CASE_FLT_FN (BUILT_IN_SCALBLN):
2156 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2157 return 0;
2158 /* Fall through... */
2159 CASE_FLT_FN (BUILT_IN_LDEXP):
2160 builtin_optab = ldexp_optab; break;
2161 CASE_FLT_FN (BUILT_IN_FMOD):
2162 builtin_optab = fmod_optab; break;
2163 CASE_FLT_FN (BUILT_IN_REMAINDER):
2164 CASE_FLT_FN (BUILT_IN_DREM):
2165 builtin_optab = remainder_optab; break;
2166 default:
2167 gcc_unreachable ();
2168 }
2169
2170 /* Make a suitable register to place result in. */
2171 mode = TYPE_MODE (TREE_TYPE (exp));
2172
2173 /* Before working hard, check whether the instruction is available. */
2174 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2175 return NULL_RTX;
2176
2177 result = gen_reg_rtx (mode);
2178
2179 if (! flag_errno_math || ! HONOR_NANS (mode))
2180 errno_set = false;
2181
2182 if (errno_set && optimize_insn_for_size_p ())
2183 return 0;
2184
2185 /* Always stabilize the argument list. */
2186 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2187 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2188
2189 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2190 op1 = expand_normal (arg1);
2191
2192 start_sequence ();
2193
2194 /* Compute into RESULT.
2195 Set RESULT to wherever the result comes back. */
2196 result = expand_binop (mode, builtin_optab, op0, op1,
2197 result, 0, OPTAB_DIRECT);
2198
2199 /* If we were unable to expand via the builtin, stop the sequence
2200 (without outputting the insns) and emit a call to the library function
2201 with the stabilized argument list. */
2202 if (result == 0)
2203 {
2204 end_sequence ();
2205 return expand_call (exp, target, target == const0_rtx);
2206 }
2207
2208 if (errno_set)
2209 expand_errno_check (exp, result);
2210
2211 /* Output the entire sequence. */
2212 insns = get_insns ();
2213 end_sequence ();
2214 emit_insn (insns);
2215
2216 return result;
2217 }
2218
2219 /* Expand a call to the builtin ternary math functions (fma).
2220 Return NULL_RTX if a normal call should be emitted rather than expanding the
2221 function in-line. EXP is the expression that is a call to the builtin
2222 function; if convenient, the result should be placed in TARGET.
2223 SUBTARGET may be used as the target for computing one of EXP's
2224 operands. */
2225
2226 static rtx
2227 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2228 {
2229 optab builtin_optab;
2230 rtx op0, op1, op2, insns, result;
2231 tree fndecl = get_callee_fndecl (exp);
2232 tree arg0, arg1, arg2;
2233 enum machine_mode mode;
2234
2235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2236 return NULL_RTX;
2237
2238 arg0 = CALL_EXPR_ARG (exp, 0);
2239 arg1 = CALL_EXPR_ARG (exp, 1);
2240 arg2 = CALL_EXPR_ARG (exp, 2);
2241
2242 switch (DECL_FUNCTION_CODE (fndecl))
2243 {
2244 CASE_FLT_FN (BUILT_IN_FMA):
2245 builtin_optab = fma_optab; break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* Make a suitable register to place result in. */
2251 mode = TYPE_MODE (TREE_TYPE (exp));
2252
2253 /* Before working hard, check whether the instruction is available. */
2254 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2255 return NULL_RTX;
2256
2257 result = gen_reg_rtx (mode);
2258
2259 /* Always stabilize the argument list. */
2260 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2261 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2262 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2263
2264 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2265 op1 = expand_normal (arg1);
2266 op2 = expand_normal (arg2);
2267
2268 start_sequence ();
2269
2270 /* Compute into RESULT.
2271 Set RESULT to wherever the result comes back. */
2272 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2273 result, 0);
2274
2275 /* If we were unable to expand via the builtin, stop the sequence
2276 (without outputting the insns) and emit a call to the library function
2277 with the stabilized argument list. */
2278 if (result == 0)
2279 {
2280 end_sequence ();
2281 return expand_call (exp, target, target == const0_rtx);
2282 }
2283
2284 /* Output the entire sequence. */
2285 insns = get_insns ();
2286 end_sequence ();
2287 emit_insn (insns);
2288
2289 return result;
2290 }
2291
2292 /* Expand a call to the builtin sin and cos math functions.
2293 Return NULL_RTX if a normal call should be emitted rather than expanding the
2294 function in-line. EXP is the expression that is a call to the builtin
2295 function; if convenient, the result should be placed in TARGET.
2296 SUBTARGET may be used as the target for computing one of EXP's
2297 operands. */
2298
2299 static rtx
2300 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2301 {
2302 optab builtin_optab;
2303 rtx op0, insns;
2304 tree fndecl = get_callee_fndecl (exp);
2305 enum machine_mode mode;
2306 tree arg;
2307
2308 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2309 return NULL_RTX;
2310
2311 arg = CALL_EXPR_ARG (exp, 0);
2312
2313 switch (DECL_FUNCTION_CODE (fndecl))
2314 {
2315 CASE_FLT_FN (BUILT_IN_SIN):
2316 CASE_FLT_FN (BUILT_IN_COS):
2317 builtin_optab = sincos_optab; break;
2318 default:
2319 gcc_unreachable ();
2320 }
2321
2322 /* Make a suitable register to place result in. */
2323 mode = TYPE_MODE (TREE_TYPE (exp));
2324
2325 /* Check if the sincos insn is available; otherwise fall back
2326 to the sin or cos insn. */
2327 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2328 switch (DECL_FUNCTION_CODE (fndecl))
2329 {
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 builtin_optab = sin_optab; break;
2332 CASE_FLT_FN (BUILT_IN_COS):
2333 builtin_optab = cos_optab; break;
2334 default:
2335 gcc_unreachable ();
2336 }
2337
2338 /* Before working hard, check whether the instruction is available. */
2339 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2340 {
2341 rtx result = gen_reg_rtx (mode);
2342
2343 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2344 need to expand the argument again. This way, we will not perform
2345 side-effects more than once. */
2346 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2347
2348 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2349
2350 start_sequence ();
2351
2352 /* Compute into RESULT.
2353 Set RESULT to wherever the result comes back. */
2354 if (builtin_optab == sincos_optab)
2355 {
2356 int ok;
2357
2358 switch (DECL_FUNCTION_CODE (fndecl))
2359 {
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2362 break;
2363 CASE_FLT_FN (BUILT_IN_COS):
2364 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2365 break;
2366 default:
2367 gcc_unreachable ();
2368 }
2369 gcc_assert (ok);
2370 }
2371 else
2372 result = expand_unop (mode, builtin_optab, op0, result, 0);
2373
2374 if (result != 0)
2375 {
2376 /* Output the entire sequence. */
2377 insns = get_insns ();
2378 end_sequence ();
2379 emit_insn (insns);
2380 return result;
2381 }
2382
2383 /* If we were unable to expand via the builtin, stop the sequence
2384 (without outputting the insns) and emit a call to the library function
2385 with the stabilized argument list. */
2386 end_sequence ();
2387 }
2388
2389 return expand_call (exp, target, target == const0_rtx);
2390 }
2391
2392 /* Given an interclass math builtin decl FNDECL and its argument ARG
2393 return an RTL instruction code that implements the functionality.
2394 If that isn't possible or available return CODE_FOR_nothing. */
2395
2396 static enum insn_code
2397 interclass_mathfn_icode (tree arg, tree fndecl)
2398 {
2399 bool errno_set = false;
2400 optab builtin_optab = unknown_optab;
2401 enum machine_mode mode;
2402
2403 switch (DECL_FUNCTION_CODE (fndecl))
2404 {
2405 CASE_FLT_FN (BUILT_IN_ILOGB):
2406 errno_set = true; builtin_optab = ilogb_optab; break;
2407 CASE_FLT_FN (BUILT_IN_ISINF):
2408 builtin_optab = isinf_optab; break;
2409 case BUILT_IN_ISNORMAL:
2410 case BUILT_IN_ISFINITE:
2411 CASE_FLT_FN (BUILT_IN_FINITE):
2412 case BUILT_IN_FINITED32:
2413 case BUILT_IN_FINITED64:
2414 case BUILT_IN_FINITED128:
2415 case BUILT_IN_ISINFD32:
2416 case BUILT_IN_ISINFD64:
2417 case BUILT_IN_ISINFD128:
2418 /* These builtins have no optabs (yet). */
2419 break;
2420 default:
2421 gcc_unreachable ();
2422 }
2423
2424 /* There's no easy way to detect the case we need to set EDOM. */
2425 if (flag_errno_math && errno_set)
2426 return CODE_FOR_nothing;
2427
2428 /* Optab mode depends on the mode of the input argument. */
2429 mode = TYPE_MODE (TREE_TYPE (arg));
2430
2431 if (builtin_optab)
2432 return optab_handler (builtin_optab, mode);
2433 return CODE_FOR_nothing;
2434 }
2435
2436 /* Expand a call to one of the builtin math functions that operate on
2437 a floating point argument and produce an integer result (ilogb, isinf,
2438 isnan, etc).
2439 Return 0 if a normal call should be emitted rather than expanding the
2440 function in-line. EXP is the expression that is a call to the builtin
2441 function; if convenient, the result should be placed in TARGET. */
2442
2443 static rtx
2444 expand_builtin_interclass_mathfn (tree exp, rtx target)
2445 {
2446 enum insn_code icode = CODE_FOR_nothing;
2447 rtx op0;
2448 tree fndecl = get_callee_fndecl (exp);
2449 enum machine_mode mode;
2450 tree arg;
2451
2452 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2453 return NULL_RTX;
2454
2455 arg = CALL_EXPR_ARG (exp, 0);
2456 icode = interclass_mathfn_icode (arg, fndecl);
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (icode != CODE_FOR_nothing)
2460 {
2461 struct expand_operand ops[1];
2462 rtx last = get_last_insn ();
2463 tree orig_arg = arg;
2464
2465 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2466 need to expand the argument again. This way, we will not perform
2467 side-effects more than once. */
2468 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2469
2470 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2471
2472 if (mode != GET_MODE (op0))
2473 op0 = convert_to_mode (mode, op0, 0);
2474
2475 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2476 if (maybe_legitimize_operands (icode, 0, 1, ops)
2477 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2478 return ops[0].value;
2479
2480 delete_insns_since (last);
2481 CALL_EXPR_ARG (exp, 0) = orig_arg;
2482 }
2483
2484 return NULL_RTX;
2485 }
2486
2487 /* Expand a call to the builtin sincos math function.
2488 Return NULL_RTX if a normal call should be emitted rather than expanding the
2489 function in-line. EXP is the expression that is a call to the builtin
2490 function. */
2491
2492 static rtx
2493 expand_builtin_sincos (tree exp)
2494 {
2495 rtx op0, op1, op2, target1, target2;
2496 enum machine_mode mode;
2497 tree arg, sinp, cosp;
2498 int result;
2499 location_t loc = EXPR_LOCATION (exp);
2500 tree alias_type, alias_off;
2501
2502 if (!validate_arglist (exp, REAL_TYPE,
2503 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2504 return NULL_RTX;
2505
2506 arg = CALL_EXPR_ARG (exp, 0);
2507 sinp = CALL_EXPR_ARG (exp, 1);
2508 cosp = CALL_EXPR_ARG (exp, 2);
2509
2510 /* Make a suitable register to place result in. */
2511 mode = TYPE_MODE (TREE_TYPE (arg));
2512
2513 /* Check if sincos insn is available, otherwise emit the call. */
2514 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2515 return NULL_RTX;
2516
2517 target1 = gen_reg_rtx (mode);
2518 target2 = gen_reg_rtx (mode);
2519
2520 op0 = expand_normal (arg);
2521 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2522 alias_off = build_int_cst (alias_type, 0);
2523 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2524 sinp, alias_off));
2525 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2526 cosp, alias_off));
2527
2528 /* Compute into target1 (sin) and target2 (cos) with a single
2529 two-output insn. */
2530 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2531 gcc_assert (result);
2532
2533 /* Move target1 and target2 to the memory locations indicated
2534 by op1 and op2. */
2535 emit_move_insn (op1, target1);
2536 emit_move_insn (op2, target2);
2537
2538 return const0_rtx;
2539 }
2540
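/* Illustrative effect, assuming a sincos insn exists for the mode:

     double s, c;
     sincos (x, &s, &c);

   expands to one two-output insn computing both values, followed by
   two stores through the user-supplied pointers.  */
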
2541 /* Expand a call to the internal cexpi builtin to the sincos math function.
2542 EXP is the expression that is a call to the builtin function; if convenient,
2543 the result should be placed in TARGET. */
2544
2545 static rtx
2546 expand_builtin_cexpi (tree exp, rtx target)
2547 {
2548 tree fndecl = get_callee_fndecl (exp);
2549 tree arg, type;
2550 enum machine_mode mode;
2551 rtx op0, op1, op2;
2552 location_t loc = EXPR_LOCATION (exp);
2553
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2555 return NULL_RTX;
2556
2557 arg = CALL_EXPR_ARG (exp, 0);
2558 type = TREE_TYPE (arg);
2559 mode = TYPE_MODE (TREE_TYPE (arg));
2560
2561 /* Try expanding via a sincos optab; fall back to emitting a libcall
2562 to sincos or cexp. We are sure one of those exists, because cexpi
2563 is only generated when either sincos or cexp is available. */
2564 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2565 {
2566 op1 = gen_reg_rtx (mode);
2567 op2 = gen_reg_rtx (mode);
2568
2569 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2570
2571 /* Compute into op1 and op2. */
2572 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2573 }
2574 else if (targetm.libc_has_function (function_sincos))
2575 {
2576 tree call, fn = NULL_TREE;
2577 tree top1, top2;
2578 rtx op1a, op2a;
2579
2580 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2581 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2582 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2583 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2585 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2586 else
2587 gcc_unreachable ();
2588
2589 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2590 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2591 op1a = copy_addr_to_reg (XEXP (op1, 0));
2592 op2a = copy_addr_to_reg (XEXP (op2, 0));
2593 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2594 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2595
2596 /* Make sure not to fold the sincos call again. */
2597 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2598 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2599 call, 3, arg, top1, top2));
2600 }
2601 else
2602 {
2603 tree call, fn = NULL_TREE, narg;
2604 tree ctype = build_complex_type (type);
2605
2606 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2607 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2609 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2611 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2612 else
2613 gcc_unreachable ();
2614
2615 /* If we don't have a decl for cexp, create one. This is the
2616 friendliest fallback if the user calls __builtin_cexpi on a
2617 target without full C99 function support. */
2618 if (fn == NULL_TREE)
2619 {
2620 tree fntype;
2621 const char *name = NULL;
2622
2623 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2624 name = "cexpf";
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2626 name = "cexp";
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2628 name = "cexpl";
2629
2630 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2631 fn = build_fn_decl (name, fntype);
2632 }
2633
2634 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2635 build_real (type, dconst0), arg);
2636
2637 /* Make sure not to fold the cexp call again. */
2638 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2639 return expand_expr (build_call_nary (ctype, call, 1, narg),
2640 target, VOIDmode, EXPAND_NORMAL);
2641 }
2642
2643 /* Now build the return value in the proper complex type. */
2644 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2645 make_tree (TREE_TYPE (arg), op2),
2646 make_tree (TREE_TYPE (arg), op1)),
2647 target, VOIDmode, EXPAND_NORMAL);
2648 }
2649
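/* The identity being implemented is cexpi (x) == cos (x) + i*sin (x).
   The final fallback above therefore builds the equivalent complex
   call

     cexp (CMPLX (0.0, x))

   where CMPLX stands in for the COMPLEX_EXPR built from dconst0 and
   the argument.  */
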
2650 /* Conveniently construct a function call expression. FNDECL names the
2651 function to be called, N is the number of arguments, and the "..."
2652 parameters are the argument expressions. Unlike build_call_expr
2653 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2654
2655 static tree
2656 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2657 {
2658 va_list ap;
2659 tree fntype = TREE_TYPE (fndecl);
2660 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2661
2662 va_start (ap, n);
2663 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2664 va_end (ap);
2665 SET_EXPR_LOCATION (fn, loc);
2666 return fn;
2667 }
2668
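/* Usage sketch: later expanders use this helper to synthesize calls,
   e.g. the mempcpy-to-memcpy rewrite below does

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                          dest, src, len);

   and hands the unfolded CALL_EXPR to expand_expr.  */
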
2669 /* Expand a call to one of the builtin rounding functions gcc defines
2670 as an extension (lfloor and lceil). As these are gcc extensions we
2671 do not need to worry about setting errno to EDOM.
2672 If expanding via the optab fails, lower the expression to (int) floor (x).
2673 EXP is the expression that is a call to the builtin function;
2674 if convenient, the result should be placed in TARGET. */
2675
2676 static rtx
2677 expand_builtin_int_roundingfn (tree exp, rtx target)
2678 {
2679 convert_optab builtin_optab;
2680 rtx op0, insns, tmp;
2681 tree fndecl = get_callee_fndecl (exp);
2682 enum built_in_function fallback_fn;
2683 tree fallback_fndecl;
2684 enum machine_mode mode;
2685 tree arg;
2686
2687 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2688 gcc_unreachable ();
2689
2690 arg = CALL_EXPR_ARG (exp, 0);
2691
2692 switch (DECL_FUNCTION_CODE (fndecl))
2693 {
2694 CASE_FLT_FN (BUILT_IN_ICEIL):
2695 CASE_FLT_FN (BUILT_IN_LCEIL):
2696 CASE_FLT_FN (BUILT_IN_LLCEIL):
2697 builtin_optab = lceil_optab;
2698 fallback_fn = BUILT_IN_CEIL;
2699 break;
2700
2701 CASE_FLT_FN (BUILT_IN_IFLOOR):
2702 CASE_FLT_FN (BUILT_IN_LFLOOR):
2703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2704 builtin_optab = lfloor_optab;
2705 fallback_fn = BUILT_IN_FLOOR;
2706 break;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711
2712 /* Make a suitable register to place result in. */
2713 mode = TYPE_MODE (TREE_TYPE (exp));
2714
2715 target = gen_reg_rtx (mode);
2716
2717 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2718 need to expand the argument again. This way, we will not perform
2719 side-effects more than once. */
2720 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2721
2722 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2723
2724 start_sequence ();
2725
2726 /* Compute into TARGET. */
2727 if (expand_sfix_optab (target, op0, builtin_optab))
2728 {
2729 /* Output the entire sequence. */
2730 insns = get_insns ();
2731 end_sequence ();
2732 emit_insn (insns);
2733 return target;
2734 }
2735
2736 /* If we were unable to expand via the builtin, stop the sequence
2737 (without outputting the insns). */
2738 end_sequence ();
2739
2740 /* Fall back to floating point rounding optab. */
2741 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2742
2743 /* For non-C99 targets we may end up without a fallback fndecl here
2744 if the user called __builtin_lfloor directly. In this case emit
2745 a call to the floor/ceil variants nevertheless. This should result
2746 in the best user experience on targets without full C99 support. */
2747 if (fallback_fndecl == NULL_TREE)
2748 {
2749 tree fntype;
2750 const char *name = NULL;
2751
2752 switch (DECL_FUNCTION_CODE (fndecl))
2753 {
2754 case BUILT_IN_ICEIL:
2755 case BUILT_IN_LCEIL:
2756 case BUILT_IN_LLCEIL:
2757 name = "ceil";
2758 break;
2759 case BUILT_IN_ICEILF:
2760 case BUILT_IN_LCEILF:
2761 case BUILT_IN_LLCEILF:
2762 name = "ceilf";
2763 break;
2764 case BUILT_IN_ICEILL:
2765 case BUILT_IN_LCEILL:
2766 case BUILT_IN_LLCEILL:
2767 name = "ceill";
2768 break;
2769 case BUILT_IN_IFLOOR:
2770 case BUILT_IN_LFLOOR:
2771 case BUILT_IN_LLFLOOR:
2772 name = "floor";
2773 break;
2774 case BUILT_IN_IFLOORF:
2775 case BUILT_IN_LFLOORF:
2776 case BUILT_IN_LLFLOORF:
2777 name = "floorf";
2778 break;
2779 case BUILT_IN_IFLOORL:
2780 case BUILT_IN_LFLOORL:
2781 case BUILT_IN_LLFLOORL:
2782 name = "floorl";
2783 break;
2784 default:
2785 gcc_unreachable ();
2786 }
2787
2788 fntype = build_function_type_list (TREE_TYPE (arg),
2789 TREE_TYPE (arg), NULL_TREE);
2790 fallback_fndecl = build_fn_decl (name, fntype);
2791 }
2792
2793 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2794
2795 tmp = expand_normal (exp);
2796 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2797
2798 /* Truncate the result of floating point optab to integer
2799 via expand_fix (). */
2800 target = gen_reg_rtx (mode);
2801 expand_fix (target, tmp, 0);
2802
2803 return target;
2804 }
2805
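/* Net effect when no lceil/lfloor insn exists: a call such as

     long l = __builtin_lfloor (x);

   is lowered to the equivalent of

     long l = (long) floor (x);

   i.e. a call to the floating point rounder followed by expand_fix.  */
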
2806 /* Expand a call to one of the builtin math functions doing integer
2807 conversion (lrint).
2808 Return 0 if a normal call should be emitted rather than expanding the
2809 function in-line. EXP is the expression that is a call to the builtin
2810 function; if convenient, the result should be placed in TARGET. */
2811
2812 static rtx
2813 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2814 {
2815 convert_optab builtin_optab;
2816 rtx op0, insns;
2817 tree fndecl = get_callee_fndecl (exp);
2818 tree arg;
2819 enum machine_mode mode;
2820 enum built_in_function fallback_fn = BUILT_IN_NONE;
2821
2822 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2823 gcc_unreachable ();
2824
2825 arg = CALL_EXPR_ARG (exp, 0);
2826
2827 switch (DECL_FUNCTION_CODE (fndecl))
2828 {
2829 CASE_FLT_FN (BUILT_IN_IRINT):
2830 fallback_fn = BUILT_IN_LRINT;
2831 /* FALLTHRU */
2832 CASE_FLT_FN (BUILT_IN_LRINT):
2833 CASE_FLT_FN (BUILT_IN_LLRINT):
2834 builtin_optab = lrint_optab;
2835 break;
2836
2837 CASE_FLT_FN (BUILT_IN_IROUND):
2838 fallback_fn = BUILT_IN_LROUND;
2839 /* FALLTHRU */
2840 CASE_FLT_FN (BUILT_IN_LROUND):
2841 CASE_FLT_FN (BUILT_IN_LLROUND):
2842 builtin_optab = lround_optab;
2843 break;
2844
2845 default:
2846 gcc_unreachable ();
2847 }
2848
2849 /* There's no easy way to detect the case we need to set EDOM. */
2850 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2851 return NULL_RTX;
2852
2853 /* Make a suitable register to place result in. */
2854 mode = TYPE_MODE (TREE_TYPE (exp));
2855
2856 /* There's no easy way to detect the case we need to set EDOM. */
2857 if (!flag_errno_math)
2858 {
2859 rtx result = gen_reg_rtx (mode);
2860
2861 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2862 need to expand the argument again. This way, we will not perform
2863 side-effects more than once. */
2864 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2865
2866 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2867
2868 start_sequence ();
2869
2870 if (expand_sfix_optab (result, op0, builtin_optab))
2871 {
2872 /* Output the entire sequence. */
2873 insns = get_insns ();
2874 end_sequence ();
2875 emit_insn (insns);
2876 return result;
2877 }
2878
2879 /* If we were unable to expand via the builtin, stop the sequence
2880 (without outputting the insns) and emit a call to the library function
2881 with the stabilized argument list. */
2882 end_sequence ();
2883 }
2884
2885 if (fallback_fn != BUILT_IN_NONE)
2886 {
2887 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2888 targets, (int) round (x) should never be transformed into
2889 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2890 a call to lround in the hope that the target provides at least some
2891 C99 functions. This should result in the best user experience on
2892 targets without full C99 support. */
2893 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2894 fallback_fn, 0);
2895
2896 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2897 fallback_fndecl, 1, arg);
2898
2899 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2900 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2901 return convert_to_mode (mode, target, 0);
2902 }
2903
2904 return expand_call (exp, target, target == const0_rtx);
2905 }
2906
2907 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2911
2912 static rtx
2913 expand_builtin_powi (tree exp, rtx target)
2914 {
2915 tree arg0, arg1;
2916 rtx op0, op1;
2917 enum machine_mode mode;
2918 enum machine_mode mode2;
2919
2920 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2921 return NULL_RTX;
2922
2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
2925 mode = TYPE_MODE (TREE_TYPE (exp));
2926
2927 /* Emit a libcall to libgcc. */
2928
2929 /* Mode of the 2nd argument must match that of an int. */
2930 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2931
2932 if (target == NULL_RTX)
2933 target = gen_reg_rtx (mode);
2934
2935 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2936 if (GET_MODE (op0) != mode)
2937 op0 = convert_to_mode (mode, op0, 0);
2938 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2939 if (GET_MODE (op1) != mode2)
2940 op1 = convert_to_mode (mode2, op1, 0);
2941
2942 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2943 target, LCT_CONST, mode, 2,
2944 op0, mode, op1, mode2);
2945
2946 return target;
2947 }
2948
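/* The libcall found through powi_optab is libgcc's __powi* family; for
   example, on a typical target

     double d = __builtin_powi (x, n);

   becomes a call to __powidf2 (x, n).  (The exact symbol comes from
   optab_libfunc, so targets may override it.)  */
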
2949 /* Expand expression EXP which is a call to the strlen builtin. Return
2950 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2951 try to get the result in TARGET, if convenient. */
2952
2953 static rtx
2954 expand_builtin_strlen (tree exp, rtx target,
2955 enum machine_mode target_mode)
2956 {
2957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2959 else
2960 {
2961 struct expand_operand ops[4];
2962 rtx pat;
2963 tree len;
2964 tree src = CALL_EXPR_ARG (exp, 0);
2965 rtx src_reg, before_strlen;
2966 enum machine_mode insn_mode = target_mode;
2967 enum insn_code icode = CODE_FOR_nothing;
2968 unsigned int align;
2969
2970 /* If the length can be computed at compile-time, return it. */
2971 len = c_strlen (src, 0);
2972 if (len)
2973 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2974
2975 /* If the length can be computed at compile-time and is constant
2976 integer, but there are side-effects in src, evaluate
2977 src for side-effects, then return len.
2978 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2979 can be optimized into: i++; x = 3; */
2980 len = c_strlen (src, 1);
2981 if (len && TREE_CODE (len) == INTEGER_CST)
2982 {
2983 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2984 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2985 }
2986
2987 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2988
2989 /* If SRC is not a pointer type, don't do this operation inline. */
2990 if (align == 0)
2991 return NULL_RTX;
2992
2993 /* Bail out if we can't compute strlen in the right mode. */
2994 while (insn_mode != VOIDmode)
2995 {
2996 icode = optab_handler (strlen_optab, insn_mode);
2997 if (icode != CODE_FOR_nothing)
2998 break;
2999
3000 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3001 }
3002 if (insn_mode == VOIDmode)
3003 return NULL_RTX;
3004
3005 /* Make a place to hold the source address. We will not expand
3006 the actual source until we are sure that the expansion will
3007 not fail -- there are trees that cannot be expanded twice. */
3008 src_reg = gen_reg_rtx (Pmode);
3009
3010 /* Mark the beginning of the strlen sequence so we can emit the
3011 source operand later. */
3012 before_strlen = get_last_insn ();
3013
3014 create_output_operand (&ops[0], target, insn_mode);
3015 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3016 create_integer_operand (&ops[2], 0);
3017 create_integer_operand (&ops[3], align);
3018 if (!maybe_expand_insn (icode, 4, ops))
3019 return NULL_RTX;
3020
3021 /* Now that we are assured of success, expand the source. */
3022 start_sequence ();
3023 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3024 if (pat != src_reg)
3025 {
3026 #ifdef POINTERS_EXTEND_UNSIGNED
3027 if (GET_MODE (pat) != Pmode)
3028 pat = convert_to_mode (Pmode, pat,
3029 POINTERS_EXTEND_UNSIGNED);
3030 #endif
3031 emit_move_insn (src_reg, pat);
3032 }
3033 pat = get_insns ();
3034 end_sequence ();
3035
3036 if (before_strlen)
3037 emit_insn_after (pat, before_strlen);
3038 else
3039 emit_insn_before (pat, get_insns ());
3040
3041 /* Return the value in the proper mode for this function. */
3042 if (GET_MODE (ops[0].value) == target_mode)
3043 target = ops[0].value;
3044 else if (target != 0)
3045 convert_move (target, ops[0].value, 0);
3046 else
3047 target = convert_to_mode (target_mode, ops[0].value, 0);
3048
3049 return target;
3050 }
3051 }
3052
3053 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3054 bytes from constant string DATA + OFFSET and return it as target
3055 constant. */
3056
3057 static rtx
3058 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3059 enum machine_mode mode)
3060 {
3061 const char *str = (const char *) data;
3062
3063 gcc_assert (offset >= 0
3064 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3065 <= strlen (str) + 1));
3066
3067 return c_readstr (str + offset, mode);
3068 }
3069
3070 /* Expand a call EXP to the memcpy builtin.
3071 Return NULL_RTX if we failed; the caller should emit a normal call,
3072 otherwise try to get the result in TARGET, if convenient (and in
3073 mode MODE if that's convenient). */
3074
3075 static rtx
3076 expand_builtin_memcpy (tree exp, rtx target)
3077 {
3078 if (!validate_arglist (exp,
3079 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3080 return NULL_RTX;
3081 else
3082 {
3083 tree dest = CALL_EXPR_ARG (exp, 0);
3084 tree src = CALL_EXPR_ARG (exp, 1);
3085 tree len = CALL_EXPR_ARG (exp, 2);
3086 const char *src_str;
3087 unsigned int src_align = get_pointer_alignment (src);
3088 unsigned int dest_align = get_pointer_alignment (dest);
3089 rtx dest_mem, src_mem, dest_addr, len_rtx;
3090 HOST_WIDE_INT expected_size = -1;
3091 unsigned int expected_align = 0;
3092
3093 /* If DEST is not a pointer type, call the normal function. */
3094 if (dest_align == 0)
3095 return NULL_RTX;
3096
3097 /* If SRC is not a pointer type, don't do this
3098 operation in-line. */
3099 if (src_align == 0)
3100 return NULL_RTX;
3101
3102 if (currently_expanding_gimple_stmt)
3103 stringop_block_profile (currently_expanding_gimple_stmt,
3104 &expected_align, &expected_size);
3105
3106 if (expected_align < dest_align)
3107 expected_align = dest_align;
3108 dest_mem = get_memory_rtx (dest, len);
3109 set_mem_align (dest_mem, dest_align);
3110 len_rtx = expand_normal (len);
3111 src_str = c_getstr (src);
3112
3113 /* If SRC is a string constant and block move would be done
3114 by pieces, we can avoid loading the string from memory
3115 and only store the computed constants. */
3116 if (src_str
3117 && CONST_INT_P (len_rtx)
3118 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3119 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3120 CONST_CAST (char *, src_str),
3121 dest_align, false))
3122 {
3123 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3124 builtin_memcpy_read_str,
3125 CONST_CAST (char *, src_str),
3126 dest_align, false, 0);
3127 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3128 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3129 return dest_mem;
3130 }
3131
3132 src_mem = get_memory_rtx (src, len);
3133 set_mem_align (src_mem, src_align);
3134
3135 /* Copy word part most expediently. */
3136 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3137 CALL_EXPR_TAILCALL (exp)
3138 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3139 expected_align, expected_size);
3140
3141 if (dest_addr == 0)
3142 {
3143 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3144 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3145 }
3146 return dest_addr;
3147 }
3148 }
3149
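/* Example of the store_by_pieces path above: with a constant source
   string and constant length,

     memcpy (buf, "abc", 4);

   needs no block move and can be emitted as immediate stores of the
   four source bytes (including the NUL) directly into BUF.  */
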
3150 /* Expand a call EXP to the mempcpy builtin.
3151 Return NULL_RTX if we failed; the caller should emit a normal call,
3152 otherwise try to get the result in TARGET, if convenient (and in
3153 mode MODE if that's convenient). If ENDP is 0 return the
3154 destination pointer, if ENDP is 1 return the end pointer ala
3155 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3156 stpcpy. */
3157
3158 static rtx
3159 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3160 {
3161 if (!validate_arglist (exp,
3162 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3163 return NULL_RTX;
3164 else
3165 {
3166 tree dest = CALL_EXPR_ARG (exp, 0);
3167 tree src = CALL_EXPR_ARG (exp, 1);
3168 tree len = CALL_EXPR_ARG (exp, 2);
3169 return expand_builtin_mempcpy_args (dest, src, len,
3170 target, mode, /*endp=*/ 1);
3171 }
3172 }
3173
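/* ENDP semantics, concretely: for a 3-byte copy into DST, endp == 0
   returns DST (memcpy), endp == 1 returns DST + 3 (mempcpy), and
   endp == 2 returns DST + 2, the address of the last byte written
   (stpcpy's result when that byte is the NUL).  */
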
3174 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3175 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3176 so that this can also be called without constructing an actual CALL_EXPR.
3177 The other arguments and return value are the same as for
3178 expand_builtin_mempcpy. */
3179
3180 static rtx
3181 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3182 rtx target, enum machine_mode mode, int endp)
3183 {
3184 /* If return value is ignored, transform mempcpy into memcpy. */
3185 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3186 {
3187 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3188 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3189 dest, src, len);
3190 return expand_expr (result, target, mode, EXPAND_NORMAL);
3191 }
3192 else
3193 {
3194 const char *src_str;
3195 unsigned int src_align = get_pointer_alignment (src);
3196 unsigned int dest_align = get_pointer_alignment (dest);
3197 rtx dest_mem, src_mem, len_rtx;
3198
3199 /* If either SRC or DEST is not a pointer type, don't do this
3200 operation in-line. */
3201 if (dest_align == 0 || src_align == 0)
3202 return NULL_RTX;
3203
3204 /* If LEN is not constant, call the normal function. */
3205 if (! host_integerp (len, 1))
3206 return NULL_RTX;
3207
3208 len_rtx = expand_normal (len);
3209 src_str = c_getstr (src);
3210
3211 /* If SRC is a string constant and block move would be done
3212 by pieces, we can avoid loading the string from memory
3213 and only store the computed constants. */
3214 if (src_str
3215 && CONST_INT_P (len_rtx)
3216 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3217 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3218 CONST_CAST (char *, src_str),
3219 dest_align, false))
3220 {
3221 dest_mem = get_memory_rtx (dest, len);
3222 set_mem_align (dest_mem, dest_align);
3223 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3224 builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false, endp);
3227 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3228 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3229 return dest_mem;
3230 }
3231
3232 if (CONST_INT_P (len_rtx)
3233 && can_move_by_pieces (INTVAL (len_rtx),
3234 MIN (dest_align, src_align)))
3235 {
3236 dest_mem = get_memory_rtx (dest, len);
3237 set_mem_align (dest_mem, dest_align);
3238 src_mem = get_memory_rtx (src, len);
3239 set_mem_align (src_mem, src_align);
3240 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3241 MIN (dest_align, src_align), endp);
3242 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3243 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3244 return dest_mem;
3245 }
3246
3247 return NULL_RTX;
3248 }
3249 }
3250
3251 #ifndef HAVE_movstr
3252 # define HAVE_movstr 0
3253 # define CODE_FOR_movstr CODE_FOR_nothing
3254 #endif
3255
3256 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3257 we failed; the caller should emit a normal call, otherwise try to
3258 get the result in TARGET, if convenient. If ENDP is 0 return the
3259 destination pointer, if ENDP is 1 return the end pointer ala
3260 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3261 stpcpy. */
3262
3263 static rtx
3264 expand_movstr (tree dest, tree src, rtx target, int endp)
3265 {
3266 struct expand_operand ops[3];
3267 rtx dest_mem;
3268 rtx src_mem;
3269
3270 if (!HAVE_movstr)
3271 return NULL_RTX;
3272
3273 dest_mem = get_memory_rtx (dest, NULL);
3274 src_mem = get_memory_rtx (src, NULL);
3275 if (!endp)
3276 {
3277 target = force_reg (Pmode, XEXP (dest_mem, 0));
3278 dest_mem = replace_equiv_address (dest_mem, target);
3279 }
3280
3281 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3282 create_fixed_operand (&ops[1], dest_mem);
3283 create_fixed_operand (&ops[2], src_mem);
3284 expand_insn (CODE_FOR_movstr, 3, ops);
3285
3286 if (endp && target != const0_rtx)
3287 {
3288 target = ops[0].value;
3289 /* movstr is supposed to set end to the address of the NUL
3290 terminator. If the caller requested a mempcpy-like return value,
3291 adjust it. */
3292 if (endp == 1)
3293 {
3294 rtx tem = plus_constant (GET_MODE (target),
3295 gen_lowpart (GET_MODE (target), target), 1);
3296 emit_move_insn (target, force_operand (tem, NULL_RTX));
3297 }
3298 }
3299 return target;
3300 }
3301
3302 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3303 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3304 try to get the result in TARGET, if convenient (and in mode MODE if that's
3305 convenient). */
3306
3307 static rtx
3308 expand_builtin_strcpy (tree exp, rtx target)
3309 {
3310 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3311 {
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 1);
3314 return expand_builtin_strcpy_args (dest, src, target);
3315 }
3316 return NULL_RTX;
3317 }
3318
3319 /* Helper function to do the actual work for expand_builtin_strcpy. The
3320 arguments to the builtin_strcpy call DEST and SRC are broken out
3321 so that this can also be called without constructing an actual CALL_EXPR.
3322 The other arguments and return value are the same as for
3323 expand_builtin_strcpy. */
3324
3325 static rtx
3326 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3327 {
3328 return expand_movstr (dest, src, target, /*endp=*/0);
3329 }
3330
3331 /* Expand a call EXP to the stpcpy builtin.
3332 Return NULL_RTX if we failed; the caller should emit a normal call,
3333 otherwise try to get the result in TARGET, if convenient (and in
3334 mode MODE if that's convenient). */
3335
3336 static rtx
3337 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3338 {
3339 tree dst, src;
3340 location_t loc = EXPR_LOCATION (exp);
3341
3342 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3343 return NULL_RTX;
3344
3345 dst = CALL_EXPR_ARG (exp, 0);
3346 src = CALL_EXPR_ARG (exp, 1);
3347
3348 /* If return value is ignored, transform stpcpy into strcpy. */
3349 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3350 {
3351 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3352 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3353 return expand_expr (result, target, mode, EXPAND_NORMAL);
3354 }
3355 else
3356 {
3357 tree len, lenp1;
3358 rtx ret;
3359
3360 /* Ensure we get an actual string whose length can be evaluated at
3361 compile-time, not an expression containing a string. This is
3362 because the latter will potentially produce pessimized code
3363 when used to produce the return value. */
3364 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3365 return expand_movstr (dst, src, target, /*endp=*/2);
3366
3367 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3368 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3369 target, mode, /*endp=*/2);
3370
3371 if (ret)
3372 return ret;
3373
3374 if (TREE_CODE (len) == INTEGER_CST)
3375 {
3376 rtx len_rtx = expand_normal (len);
3377
3378 if (CONST_INT_P (len_rtx))
3379 {
3380 ret = expand_builtin_strcpy_args (dst, src, target);
3381
3382 if (ret)
3383 {
3384 if (! target)
3385 {
3386 if (mode != VOIDmode)
3387 target = gen_reg_rtx (mode);
3388 else
3389 target = gen_reg_rtx (GET_MODE (ret));
3390 }
3391 if (GET_MODE (target) != GET_MODE (ret))
3392 ret = gen_lowpart (GET_MODE (target), ret);
3393
3394 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3395 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3396 gcc_assert (ret);
3397
3398 return target;
3399 }
3400 }
3401 }
3402
3403 return expand_movstr (dst, src, target, /*endp=*/2);
3404 }
3405 }
3406
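/* As a worked example of the transformation above (a sketch):

     char buf[16];
     char *end = stpcpy (buf, "abc");

   Here c_strlen gives LEN == 3 and LENP1 == 4, so the call is expanded
   as mempcpy (buf, "abc", 4) with ENDP == 2, and the value is buf + 3,
   the address of the copied NUL terminator. */
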
3407 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3408 bytes from constant string DATA + OFFSET and return it as target
3409 constant. */
3410
3411 rtx
3412 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3413 enum machine_mode mode)
3414 {
3415 const char *str = (const char *) data;
3416
3417 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3418 return const0_rtx;
3419
3420 return c_readstr (str + offset, mode);
3421 }
3422
3423 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3424 NULL_RTX if we failed; the caller should emit a normal call. */
3425
3426 static rtx
3427 expand_builtin_strncpy (tree exp, rtx target)
3428 {
3429 location_t loc = EXPR_LOCATION (exp);
3430
3431 if (validate_arglist (exp,
3432 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3433 {
3434 tree dest = CALL_EXPR_ARG (exp, 0);
3435 tree src = CALL_EXPR_ARG (exp, 1);
3436 tree len = CALL_EXPR_ARG (exp, 2);
3437 tree slen = c_strlen (src, 1);
3438
3439 /* We must be passed a constant len and src parameter. */
3440 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3441 return NULL_RTX;
3442
3443 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3444
3445 /* We're required to pad with trailing zeros if the requested
3446 len is greater than strlen(SRC)+1. In that case try to
3447 use store_by_pieces; if it fails, punt. */
3448 if (tree_int_cst_lt (slen, len))
3449 {
3450 unsigned int dest_align = get_pointer_alignment (dest);
3451 const char *p = c_getstr (src);
3452 rtx dest_mem;
3453
3454 if (!p || dest_align == 0 || !host_integerp (len, 1)
3455 || !can_store_by_pieces (tree_low_cst (len, 1),
3456 builtin_strncpy_read_str,
3457 CONST_CAST (char *, p),
3458 dest_align, false))
3459 return NULL_RTX;
3460
3461 dest_mem = get_memory_rtx (dest, len);
3462 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3463 builtin_strncpy_read_str,
3464 CONST_CAST (char *, p), dest_align, false, 0);
3465 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3466 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3467 return dest_mem;
3468 }
3469 }
3470 return NULL_RTX;
3471 }
3472
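/* For example (a sketch):

     char buf[5];
     strncpy (buf, "ab", 5);    yields buf = { 'a', 'b', 0, 0, 0 }

   Here SLEN becomes strlen ("ab") + 1 == 3, which is less than LEN == 5,
   so the region must be zero-padded; builtin_strncpy_read_str supplies
   the trailing zeros by returning const0_rtx for offsets past the end
   of the constant string. */
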
3473 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3474 bytes from constant string DATA + OFFSET and return it as target
3475 constant. */
3476
3477 rtx
3478 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3479 enum machine_mode mode)
3480 {
3481 const char *c = (const char *) data;
3482 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3483
3484 memset (p, *c, GET_MODE_SIZE (mode));
3485
3486 return c_readstr (p, mode);
3487 }
3488
3489 /* Callback routine for store_by_pieces. Return the RTL of a register
3490 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3491 char value given in the RTL register data. For example, if mode is
3492 4 bytes wide, return the RTL for 0x01010101*data. */
3493
3494 static rtx
3495 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3496 enum machine_mode mode)
3497 {
3498 rtx target, coeff;
3499 size_t size;
3500 char *p;
3501
3502 size = GET_MODE_SIZE (mode);
3503 if (size == 1)
3504 return (rtx) data;
3505
3506 p = XALLOCAVEC (char, size);
3507 memset (p, 1, size);
3508 coeff = c_readstr (p, mode);
3509
3510 target = convert_to_mode (mode, (rtx) data, 1);
3511 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3512 return force_reg (mode, target);
3513 }
3514
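/* For instance, in a 4-byte mode COEFF is read from "\1\1\1\1", i.e.
   0x01010101, and for a run-time byte value v the replicated word is
   computed as (a sketch):

     (uint32_t) v * 0x01010101;     e.g. v == 0xab  gives  0xabababab

   which is GET_MODE_SIZE (mode) copies of the unsigned char value. */
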
3515 /* Expand expression EXP, which is a call to the memset builtin. Return
3516 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3517 try to get the result in TARGET, if convenient (and in mode MODE if that's
3518 convenient). */
3519
3520 static rtx
3521 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3522 {
3523 if (!validate_arglist (exp,
3524 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 return NULL_RTX;
3526 else
3527 {
3528 tree dest = CALL_EXPR_ARG (exp, 0);
3529 tree val = CALL_EXPR_ARG (exp, 1);
3530 tree len = CALL_EXPR_ARG (exp, 2);
3531 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3532 }
3533 }
3534
3535 /* Helper function to do the actual work for expand_builtin_memset. The
3536 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3537 so that this can also be called without constructing an actual CALL_EXPR.
3538 The other arguments and return value are the same as for
3539 expand_builtin_memset. */
3540
3541 static rtx
3542 expand_builtin_memset_args (tree dest, tree val, tree len,
3543 rtx target, enum machine_mode mode, tree orig_exp)
3544 {
3545 tree fndecl, fn;
3546 enum built_in_function fcode;
3547 enum machine_mode val_mode;
3548 char c;
3549 unsigned int dest_align;
3550 rtx dest_mem, dest_addr, len_rtx;
3551 HOST_WIDE_INT expected_size = -1;
3552 unsigned int expected_align = 0;
3553
3554 dest_align = get_pointer_alignment (dest);
3555
3556 /* If DEST is not a pointer type, don't do this operation in-line. */
3557 if (dest_align == 0)
3558 return NULL_RTX;
3559
3560 if (currently_expanding_gimple_stmt)
3561 stringop_block_profile (currently_expanding_gimple_stmt,
3562 &expected_align, &expected_size);
3563
3564 if (expected_align < dest_align)
3565 expected_align = dest_align;
3566
3567 /* If the LEN parameter is zero, return DEST. */
3568 if (integer_zerop (len))
3569 {
3570 /* Evaluate and ignore VAL in case it has side-effects. */
3571 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3573 }
3574
3575 /* Stabilize the arguments in case we fail. */
3576 dest = builtin_save_expr (dest);
3577 val = builtin_save_expr (val);
3578 len = builtin_save_expr (len);
3579
3580 len_rtx = expand_normal (len);
3581 dest_mem = get_memory_rtx (dest, len);
3582 val_mode = TYPE_MODE (unsigned_char_type_node);
3583
3584 if (TREE_CODE (val) != INTEGER_CST)
3585 {
3586 rtx val_rtx;
3587
3588 val_rtx = expand_normal (val);
3589 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3590
3591 /* Assume that we can memset by pieces if we can store
3592    the coefficients by pieces (in the required modes).
3593    We can't pass builtin_memset_gen_str as that emits RTL. */
3594 c = 1;
3595 if (host_integerp (len, 1)
3596 && can_store_by_pieces (tree_low_cst (len, 1),
3597 builtin_memset_read_str, &c, dest_align,
3598 true))
3599 {
3600 val_rtx = force_reg (val_mode, val_rtx);
3601 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3602 builtin_memset_gen_str, val_rtx, dest_align,
3603 true, 0);
3604 }
3605 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3606 dest_align, expected_align,
3607 expected_size))
3608 goto do_libcall;
3609
3610 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3611 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3612 return dest_mem;
3613 }
3614
3615 if (target_char_cast (val, &c))
3616 goto do_libcall;
3617
3618 if (c)
3619 {
3620 if (host_integerp (len, 1)
3621 && can_store_by_pieces (tree_low_cst (len, 1),
3622 builtin_memset_read_str, &c, dest_align,
3623 true))
3624 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3625 builtin_memset_read_str, &c, dest_align, true, 0);
3626 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3627 gen_int_mode (c, val_mode),
3628 dest_align, expected_align,
3629 expected_size))
3630 goto do_libcall;
3631
3632 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3633 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3634 return dest_mem;
3635 }
3636
3637 set_mem_align (dest_mem, dest_align);
3638 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3639 CALL_EXPR_TAILCALL (orig_exp)
3640 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3641 expected_align, expected_size);
3642
3643 if (dest_addr == 0)
3644 {
3645 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3646 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3647 }
3648
3649 return dest_addr;
3650
3651 do_libcall:
3652 fndecl = get_callee_fndecl (orig_exp);
3653 fcode = DECL_FUNCTION_CODE (fndecl);
3654 if (fcode == BUILT_IN_MEMSET)
3655 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3656 dest, val, len);
3657 else if (fcode == BUILT_IN_BZERO)
3658 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3659 dest, len);
3660 else
3661 gcc_unreachable ();
3662 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3663 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3664 return expand_call (fn, target, target == const0_rtx);
3665 }
3666
3667 /* Expand expression EXP, which is a call to the bzero builtin. Return
3668 NULL_RTX if we failed; the caller should emit a normal call. */
3669
3670 static rtx
3671 expand_builtin_bzero (tree exp)
3672 {
3673 tree dest, size;
3674 location_t loc = EXPR_LOCATION (exp);
3675
3676 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3677 return NULL_RTX;
3678
3679 dest = CALL_EXPR_ARG (exp, 0);
3680 size = CALL_EXPR_ARG (exp, 1);
3681
3682 /* New argument list transforming bzero(ptr x, int y) to
3683 memset(ptr x, int 0, size_t y). This is done this way
3684 so that if it isn't expanded inline, we fall back to
3685 calling bzero instead of memset. */
3686
3687 return expand_builtin_memset_args (dest, integer_zero_node,
3688 fold_convert_loc (loc,
3689 size_type_node, size),
3690 const0_rtx, VOIDmode, exp);
3691 }
3692
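/* In other words the call is rewritten as (a sketch):

     bzero (p, n)   ==>   memset (p, 0, (size_t) n)

   with the result discarded (target is const0_rtx), while ORIG_EXP is
   kept so that a failed inline expansion still emits a call to bzero
   rather than to memset. */
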
3693 /* Expand expression EXP, which is a call to the memcmp built-in function.
3694 Return NULL_RTX if we failed and the caller should emit a normal call,
3695 otherwise try to get the result in TARGET, if convenient (and in mode
3696 MODE, if that's convenient). */
3697
3698 static rtx
3699 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3700 ATTRIBUTE_UNUSED enum machine_mode mode)
3701 {
3702 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3703
3704 if (!validate_arglist (exp,
3705 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3706 return NULL_RTX;
3707
3708 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3709 implementing memcmp because it will stop if it encounters two
3710 zero bytes. */
3711 #if defined HAVE_cmpmemsi
3712 {
3713 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3714 rtx result;
3715 rtx insn;
3716 tree arg1 = CALL_EXPR_ARG (exp, 0);
3717 tree arg2 = CALL_EXPR_ARG (exp, 1);
3718 tree len = CALL_EXPR_ARG (exp, 2);
3719
3720 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3721 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3722 enum machine_mode insn_mode;
3723
3724 if (HAVE_cmpmemsi)
3725 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3726 else
3727 return NULL_RTX;
3728
3729 /* If either argument is not a pointer type, punt and call the function. */
3730 if (arg1_align == 0 || arg2_align == 0)
3731 return NULL_RTX;
3732
3733 /* Make a place to write the result of the instruction. */
3734 result = target;
3735 if (! (result != 0
3736 && REG_P (result) && GET_MODE (result) == insn_mode
3737 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3738 result = gen_reg_rtx (insn_mode);
3739
3740 arg1_rtx = get_memory_rtx (arg1, len);
3741 arg2_rtx = get_memory_rtx (arg2, len);
3742 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3743
3744 /* Set MEM_SIZE as appropriate. */
3745 if (CONST_INT_P (arg3_rtx))
3746 {
3747 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3748 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3749 }
3750
3751 if (HAVE_cmpmemsi)
3752 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3753 GEN_INT (MIN (arg1_align, arg2_align)));
3754 else
3755 gcc_unreachable ();
3756
3757 if (insn)
3758 emit_insn (insn);
3759 else
3760 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3761 TYPE_MODE (integer_type_node), 3,
3762 XEXP (arg1_rtx, 0), Pmode,
3763 XEXP (arg2_rtx, 0), Pmode,
3764 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3765 TYPE_UNSIGNED (sizetype)),
3766 TYPE_MODE (sizetype));
3767
3768 /* Return the value in the proper mode for this function. */
3769 mode = TYPE_MODE (TREE_TYPE (exp));
3770 if (GET_MODE (result) == mode)
3771 return result;
3772 else if (target != 0)
3773 {
3774 convert_move (target, result, 0);
3775 return target;
3776 }
3777 else
3778 return convert_to_mode (mode, result, 0);
3779 }
3780 #endif /* HAVE_cmpmemsi. */
3781
3782 return NULL_RTX;
3783 }
3784
3785 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3786 if we failed; the caller should emit a normal call. Otherwise try to get
3787 the result in TARGET, if convenient. */
3788
3789 static rtx
3790 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3791 {
3792 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3793 return NULL_RTX;
3794
3795 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3796 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3797 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3798 {
3799 rtx arg1_rtx, arg2_rtx;
3800 rtx result, insn = NULL_RTX;
3801 tree fndecl, fn;
3802 tree arg1 = CALL_EXPR_ARG (exp, 0);
3803 tree arg2 = CALL_EXPR_ARG (exp, 1);
3804
3805 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3806 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3807
3808 /* If either argument is not a pointer type, punt and call the function. */
3809 if (arg1_align == 0 || arg2_align == 0)
3810 return NULL_RTX;
3811
3812 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3813 arg1 = builtin_save_expr (arg1);
3814 arg2 = builtin_save_expr (arg2);
3815
3816 arg1_rtx = get_memory_rtx (arg1, NULL);
3817 arg2_rtx = get_memory_rtx (arg2, NULL);
3818
3819 #ifdef HAVE_cmpstrsi
3820 /* Try to call cmpstrsi. */
3821 if (HAVE_cmpstrsi)
3822 {
3823 enum machine_mode insn_mode
3824 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3825
3826 /* Make a place to write the result of the instruction. */
3827 result = target;
3828 if (! (result != 0
3829 && REG_P (result) && GET_MODE (result) == insn_mode
3830 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3831 result = gen_reg_rtx (insn_mode);
3832
3833 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3834 GEN_INT (MIN (arg1_align, arg2_align)));
3835 }
3836 #endif
3837 #ifdef HAVE_cmpstrnsi
3838 /* Try to determine at least one length and call cmpstrnsi. */
3839 if (!insn && HAVE_cmpstrnsi)
3840 {
3841 tree len;
3842 rtx arg3_rtx;
3843
3844 enum machine_mode insn_mode
3845 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3846 tree len1 = c_strlen (arg1, 1);
3847 tree len2 = c_strlen (arg2, 1);
3848
3849 if (len1)
3850 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3851 if (len2)
3852 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3853
3854 /* If we don't have a constant length for the first, use the length
3855 of the second, if we know it. We don't require a constant for
3856 this case; some cost analysis could be done if both are available
3857 but neither is constant. For now, assume they're equally cheap,
3858 unless one has side effects. If both strings have constant lengths,
3859 use the smaller. */
3860
3861 if (!len1)
3862 len = len2;
3863 else if (!len2)
3864 len = len1;
3865 else if (TREE_SIDE_EFFECTS (len1))
3866 len = len2;
3867 else if (TREE_SIDE_EFFECTS (len2))
3868 len = len1;
3869 else if (TREE_CODE (len1) != INTEGER_CST)
3870 len = len2;
3871 else if (TREE_CODE (len2) != INTEGER_CST)
3872 len = len1;
3873 else if (tree_int_cst_lt (len1, len2))
3874 len = len1;
3875 else
3876 len = len2;
3877
3878 /* If both arguments have side effects, we cannot optimize. */
3879 if (!len || TREE_SIDE_EFFECTS (len))
3880 goto do_libcall;
3881
3882 arg3_rtx = expand_normal (len);
3883
3884 /* Make a place to write the result of the instruction. */
3885 result = target;
3886 if (! (result != 0
3887 && REG_P (result) && GET_MODE (result) == insn_mode
3888 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3889 result = gen_reg_rtx (insn_mode);
3890
3891 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3892 GEN_INT (MIN (arg1_align, arg2_align)));
3893 }
3894 #endif
3895
3896 if (insn)
3897 {
3898 enum machine_mode mode;
3899 emit_insn (insn);
3900
3901 /* Return the value in the proper mode for this function. */
3902 mode = TYPE_MODE (TREE_TYPE (exp));
3903 if (GET_MODE (result) == mode)
3904 return result;
3905 if (target == 0)
3906 return convert_to_mode (mode, result, 0);
3907 convert_move (target, result, 0);
3908 return target;
3909 }
3910
3911 /* Expand the library call ourselves using a stabilized argument
3912 list to avoid evaluating the function's arguments twice. */
3913 #ifdef HAVE_cmpstrnsi
3914 do_libcall:
3915 #endif
3916 fndecl = get_callee_fndecl (exp);
3917 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3918 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3919 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3920 return expand_call (fn, target, target == const0_rtx);
3921 }
3922 #endif
3923 return NULL_RTX;
3924 }
3925
3926 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3927 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try
3928 to get the result in TARGET, if convenient. */
3929
3930 static rtx
3931 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3932 ATTRIBUTE_UNUSED enum machine_mode mode)
3933 {
3934 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3935
3936 if (!validate_arglist (exp,
3937 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3938 return NULL_RTX;
3939
3940 /* If c_strlen can determine an expression for one of the string
3941 lengths, and it doesn't have side effects, then emit cmpstrnsi
3942 using length MIN(strlen(string)+1, arg3). */
3943 #ifdef HAVE_cmpstrnsi
3944 if (HAVE_cmpstrnsi)
3945 {
3946 tree len, len1, len2;
3947 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3948 rtx result, insn;
3949 tree fndecl, fn;
3950 tree arg1 = CALL_EXPR_ARG (exp, 0);
3951 tree arg2 = CALL_EXPR_ARG (exp, 1);
3952 tree arg3 = CALL_EXPR_ARG (exp, 2);
3953
3954 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3955 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3956 enum machine_mode insn_mode
3957 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3958
3959 len1 = c_strlen (arg1, 1);
3960 len2 = c_strlen (arg2, 1);
3961
3962 if (len1)
3963 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3964 if (len2)
3965 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3966
3967 /* If we don't have a constant length for the first, use the length
3968 of the second, if we know it. We don't require a constant for
3969 this case; some cost analysis could be done if both are available
3970 but neither is constant. For now, assume they're equally cheap,
3971 unless one has side effects. If both strings have constant lengths,
3972 use the smaller. */
3973
3974 if (!len1)
3975 len = len2;
3976 else if (!len2)
3977 len = len1;
3978 else if (TREE_SIDE_EFFECTS (len1))
3979 len = len2;
3980 else if (TREE_SIDE_EFFECTS (len2))
3981 len = len1;
3982 else if (TREE_CODE (len1) != INTEGER_CST)
3983 len = len2;
3984 else if (TREE_CODE (len2) != INTEGER_CST)
3985 len = len1;
3986 else if (tree_int_cst_lt (len1, len2))
3987 len = len1;
3988 else
3989 len = len2;
3990
3991 /* If both arguments have side effects, we cannot optimize. */
3992 if (!len || TREE_SIDE_EFFECTS (len))
3993 return NULL_RTX;
3994
3995 /* The actual new length parameter is MIN(len,arg3). */
3996 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3997 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3998
3999 /* If either argument is not a pointer type, punt and call the function. */
4000 if (arg1_align == 0 || arg2_align == 0)
4001 return NULL_RTX;
4002
4003 /* Make a place to write the result of the instruction. */
4004 result = target;
4005 if (! (result != 0
4006 && REG_P (result) && GET_MODE (result) == insn_mode
4007 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4008 result = gen_reg_rtx (insn_mode);
4009
4010 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4011 arg1 = builtin_save_expr (arg1);
4012 arg2 = builtin_save_expr (arg2);
4013 len = builtin_save_expr (len);
4014
4015 arg1_rtx = get_memory_rtx (arg1, len);
4016 arg2_rtx = get_memory_rtx (arg2, len);
4017 arg3_rtx = expand_normal (len);
4018 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4019 GEN_INT (MIN (arg1_align, arg2_align)));
4020 if (insn)
4021 {
4022 emit_insn (insn);
4023
4024 /* Return the value in the proper mode for this function. */
4025 mode = TYPE_MODE (TREE_TYPE (exp));
4026 if (GET_MODE (result) == mode)
4027 return result;
4028 if (target == 0)
4029 return convert_to_mode (mode, result, 0);
4030 convert_move (target, result, 0);
4031 return target;
4032 }
4033
4034 /* Expand the library call ourselves using a stabilized argument
4035 list to avoid evaluating the function's arguments twice. */
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4038 arg1, arg2, len);
4039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4041 return expand_call (fn, target, target == const0_rtx);
4042 }
4043 #endif
4044 return NULL_RTX;
4045 }
4046
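/* For example (a sketch): for strncmp (s, "abc", 100) with a
   non-constant S, LEN2 is strlen ("abc") + 1 == 4, so the cmpstrnsi
   expansion compares MIN (4, 100) == 4 bytes; the NUL terminator
   bounds the comparison and the remaining bytes of either argument
   never need to be read. */
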
4047 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4048 if that's convenient. */
4049
4050 rtx
4051 expand_builtin_saveregs (void)
4052 {
4053 rtx val, seq;
4054
4055 /* Don't do __builtin_saveregs more than once in a function.
4056 Save the result of the first call and reuse it. */
4057 if (saveregs_value != 0)
4058 return saveregs_value;
4059
4060 /* When this function is called, it means that registers must be
4061 saved on entry to this function. So we migrate the call to the
4062 first insn of this function. */
4063
4064 start_sequence ();
4065
4066 /* Do whatever the machine needs done in this case. */
4067 val = targetm.calls.expand_builtin_saveregs ();
4068
4069 seq = get_insns ();
4070 end_sequence ();
4071
4072 saveregs_value = val;
4073
4074 /* Put the insns after the NOTE that starts the function. If this
4075 is inside a start_sequence, make the outer-level insn chain current, so
4076 the code is placed at the start of the function. */
4077 push_topmost_sequence ();
4078 emit_insn_after (seq, entry_of_function ());
4079 pop_topmost_sequence ();
4080
4081 return val;
4082 }
4083
4084 /* Expand a call to __builtin_next_arg. */
4085
4086 static rtx
4087 expand_builtin_next_arg (void)
4088 {
4089 /* Checking arguments is already done in fold_builtin_next_arg
4090 that must be called before this function. */
4091 return expand_binop (ptr_mode, add_optab,
4092 crtl->args.internal_arg_pointer,
4093 crtl->args.arg_offset_rtx,
4094 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4095 }
4096
4097 /* Make it easier for the backends by protecting the valist argument
4098 from multiple evaluations. */
4099
4100 static tree
4101 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4102 {
4103 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4104
4105 /* The current way of determining the type of valist is completely
4106 bogus. We should have the information on the va builtin instead. */
4107 if (!vatype)
4108 vatype = targetm.fn_abi_va_list (cfun->decl);
4109
4110 if (TREE_CODE (vatype) == ARRAY_TYPE)
4111 {
4112 if (TREE_SIDE_EFFECTS (valist))
4113 valist = save_expr (valist);
4114
4115 /* For this case, the backends will be expecting a pointer to
4116 vatype, but it's possible we've actually been given an array
4117 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4118 So fix it. */
4119 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4120 {
4121 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4122 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4123 }
4124 }
4125 else
4126 {
4127 tree pt = build_pointer_type (vatype);
4128
4129 if (! needs_lvalue)
4130 {
4131 if (! TREE_SIDE_EFFECTS (valist))
4132 return valist;
4133
4134 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4135 TREE_SIDE_EFFECTS (valist) = 1;
4136 }
4137
4138 if (TREE_SIDE_EFFECTS (valist))
4139 valist = save_expr (valist);
4140 valist = fold_build2_loc (loc, MEM_REF,
4141 vatype, valist, build_int_cst (pt, 0));
4142 }
4143
4144 return valist;
4145 }
4146
4147 /* The "standard" definition of va_list is void*. */
4148
4149 tree
4150 std_build_builtin_va_list (void)
4151 {
4152 return ptr_type_node;
4153 }
4154
4155 /* The "standard" abi va_list is va_list_type_node. */
4156
4157 tree
4158 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4159 {
4160 return va_list_type_node;
4161 }
4162
4163 /* The "standard" type of va_list is va_list_type_node. */
4164
4165 tree
4166 std_canonical_va_list_type (tree type)
4167 {
4168 tree wtype, htype;
4169
4170 if (INDIRECT_REF_P (type))
4171 type = TREE_TYPE (type);
4172 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4173 type = TREE_TYPE (type);
4174 wtype = va_list_type_node;
4175 htype = type;
4176 /* Handle structure va_list types. */
4177 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4178 htype = TREE_TYPE (htype);
4179 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4180 {
4181 /* If va_list is an array type, the argument may have decayed
4182 to a pointer type, e.g. by being passed to another function.
4183 In that case, unwrap both types so that we can compare the
4184 underlying records. */
4185 if (TREE_CODE (htype) == ARRAY_TYPE
4186 || POINTER_TYPE_P (htype))
4187 {
4188 wtype = TREE_TYPE (wtype);
4189 htype = TREE_TYPE (htype);
4190 }
4191 }
4192 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4193 return va_list_type_node;
4194
4195 return NULL_TREE;
4196 }
4197
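/* For example (a sketch): on a target whose va_list is an array type,
   e.g. the x86-64 definition

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list object passed to another function decays to
   struct __va_list_tag *.  The unwrapping above strips the ARRAY_TYPE
   on one side and the decayed POINTER_TYPE on the other so the
   underlying RECORD_TYPEs can be compared. */
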
4198 /* The "standard" implementation of va_start: just assign `nextarg' to
4199 the variable. */
4200
4201 void
4202 std_expand_builtin_va_start (tree valist, rtx nextarg)
4203 {
4204 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4205 convert_move (va_r, nextarg, 0);
4206 }
4207
4208 /* Expand EXP, a call to __builtin_va_start. */
4209
4210 static rtx
4211 expand_builtin_va_start (tree exp)
4212 {
4213 rtx nextarg;
4214 tree valist;
4215 location_t loc = EXPR_LOCATION (exp);
4216
4217 if (call_expr_nargs (exp) < 2)
4218 {
4219 error_at (loc, "too few arguments to function %<va_start%>");
4220 return const0_rtx;
4221 }
4222
4223 if (fold_builtin_next_arg (exp, true))
4224 return const0_rtx;
4225
4226 nextarg = expand_builtin_next_arg ();
4227 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4228
4229 if (targetm.expand_builtin_va_start)
4230 targetm.expand_builtin_va_start (valist, nextarg);
4231 else
4232 std_expand_builtin_va_start (valist, nextarg);
4233
4234 return const0_rtx;
4235 }
4236
4237 /* Expand EXP, a call to __builtin_va_end. */
4238
4239 static rtx
4240 expand_builtin_va_end (tree exp)
4241 {
4242 tree valist = CALL_EXPR_ARG (exp, 0);
4243
4244 /* Evaluate for side effects, if needed. I hate macros that don't
4245 do that. */
4246 if (TREE_SIDE_EFFECTS (valist))
4247 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4248
4249 return const0_rtx;
4250 }
4251
4252 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4253 builtin rather than just as an assignment in stdarg.h because of the
4254 nastiness of array-type va_list types. */
4255
4256 static rtx
4257 expand_builtin_va_copy (tree exp)
4258 {
4259 tree dst, src, t;
4260 location_t loc = EXPR_LOCATION (exp);
4261
4262 dst = CALL_EXPR_ARG (exp, 0);
4263 src = CALL_EXPR_ARG (exp, 1);
4264
4265 dst = stabilize_va_list_loc (loc, dst, 1);
4266 src = stabilize_va_list_loc (loc, src, 0);
4267
4268 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4269
4270 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4271 {
4272 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4273 TREE_SIDE_EFFECTS (t) = 1;
4274 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4275 }
4276 else
4277 {
4278 rtx dstb, srcb, size;
4279
4280 /* Evaluate to pointers. */
4281 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4282 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4283 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4284 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4285
4286 dstb = convert_memory_address (Pmode, dstb);
4287 srcb = convert_memory_address (Pmode, srcb);
4288
4289 /* "Dereference" to BLKmode memories. */
4290 dstb = gen_rtx_MEM (BLKmode, dstb);
4291 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4292 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4293 srcb = gen_rtx_MEM (BLKmode, srcb);
4294 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4295 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4296
4297 /* Copy. */
4298 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4299 }
4300
4301 return const0_rtx;
4302 }
4303
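/* At the source level the two branches above amount to (a sketch):
   for a scalar va_list, va_copy (d, s) is just the assignment d = s;
   for an array-type va_list it is roughly

     memcpy (&d[0], &s[0], sizeof (va_list));

   which is why the second branch needs a BLKmode block move rather
   than a simple MODIFY_EXPR. */
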
4304 /* Expand a call to one of the builtin functions __builtin_frame_address or
4305 __builtin_return_address. */
4306
4307 static rtx
4308 expand_builtin_frame_address (tree fndecl, tree exp)
4309 {
4310 /* The argument must be a nonnegative integer constant.
4311 It counts the number of frames to scan up the stack.
4312 The value is the return address saved in that frame. */
4313 if (call_expr_nargs (exp) == 0)
4314 /* Warning about missing arg was already issued. */
4315 return const0_rtx;
4316 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4317 {
4318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4319 error ("invalid argument to %<__builtin_frame_address%>");
4320 else
4321 error ("invalid argument to %<__builtin_return_address%>");
4322 return const0_rtx;
4323 }
4324 else
4325 {
4326 rtx tem
4327 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4328 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4329
4330 /* Some ports cannot access arbitrary stack frames. */
4331 if (tem == NULL)
4332 {
4333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4334 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4335 else
4336 warning (0, "unsupported argument to %<__builtin_return_address%>");
4337 return const0_rtx;
4338 }
4339
4340 /* For __builtin_frame_address, return what we've got. */
4341 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4342 return tem;
4343
4344 if (!REG_P (tem)
4345 && ! CONSTANT_P (tem))
4346 tem = copy_addr_to_reg (tem);
4347 return tem;
4348 }
4349 }
4350
4351 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4352 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4353 is the same as for allocate_dynamic_stack_space. */
4354
4355 static rtx
4356 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4357 {
4358 rtx op0;
4359 rtx result;
4360 bool valid_arglist;
4361 unsigned int align;
4362 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4363 == BUILT_IN_ALLOCA_WITH_ALIGN);
4364
4365 /* Emit normal call if we use mudflap. */
4366 if (flag_mudflap)
4367 return NULL_RTX;
4368
4369 valid_arglist
4370 = (alloca_with_align
4371 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4372 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4373
4374 if (!valid_arglist)
4375 return NULL_RTX;
4376
4377 /* Compute the argument. */
4378 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4379
4380 /* Compute the alignment. */
4381 align = (alloca_with_align
4382 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4383 : BIGGEST_ALIGNMENT);
4384
4385 /* Allocate the desired space. */
4386 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4387 result = convert_memory_address (ptr_mode, result);
4388
4389 return result;
4390 }
4391
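/* As an illustration (a sketch): plain __builtin_alloca requests
   BIGGEST_ALIGNMENT, while

     p = __builtin_alloca_with_align (n, 128);

   passes the alignment in bits as a compile-time constant, so the
   space above is allocated with at least 128-bit (16-byte)
   alignment. */
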
4392 /* Expand a call to bswap builtin in EXP.
4393 Return NULL_RTX if a normal call should be emitted rather than expanding the
4394 function in-line. If convenient, the result should be placed in TARGET.
4395 SUBTARGET may be used as the target for computing one of EXP's operands. */
4396
4397 static rtx
4398 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4399 rtx subtarget)
4400 {
4401 tree arg;
4402 rtx op0;
4403
4404 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4405 return NULL_RTX;
4406
4407 arg = CALL_EXPR_ARG (exp, 0);
4408 op0 = expand_expr (arg,
4409 subtarget && GET_MODE (subtarget) == target_mode
4410 ? subtarget : NULL_RTX,
4411 target_mode, EXPAND_NORMAL);
4412 if (GET_MODE (op0) != target_mode)
4413 op0 = convert_to_mode (target_mode, op0, 1);
4414
4415 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4416
4417 gcc_assert (target);
4418
4419 return convert_to_mode (target_mode, target, 1);
4420 }
4421
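/* For example (a sketch), a SImode bswap reverses the byte order:

     __builtin_bswap32 (0x11223344) == 0x44332211

   The optab does all the work; the conversions above only matter when
   the operand was expanded in a mode other than TARGET_MODE. */
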
4422 /* Expand a call to a unary builtin in EXP.
4423 Return NULL_RTX if a normal call should be emitted rather than expanding the
4424 function in-line. If convenient, the result should be placed in TARGET.
4425 SUBTARGET may be used as the target for computing one of EXP's operands. */
4426
4427 static rtx
4428 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4429 rtx subtarget, optab op_optab)
4430 {
4431 rtx op0;
4432
4433 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4434 return NULL_RTX;
4435
4436 /* Compute the argument. */
4437 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4438 (subtarget
4439 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4440 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4441 VOIDmode, EXPAND_NORMAL);
4442 /* Compute op, into TARGET if possible.
4443 Set TARGET to wherever the result comes back. */
4444 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4445 op_optab, op0, target, op_optab != clrsb_optab);
4446 gcc_assert (target);
4447
4448 return convert_to_mode (target_mode, target, 0);
4449 }
4450
4451 /* Expand a call to __builtin_expect. We just return our argument
4452 as the builtin_expect semantics should already have been applied by
4453 the tree branch prediction pass. */
4454
4455 static rtx
4456 expand_builtin_expect (tree exp, rtx target)
4457 {
4458 tree arg;
4459
4460 if (call_expr_nargs (exp) < 2)
4461 return const0_rtx;
4462 arg = CALL_EXPR_ARG (exp, 0);
4463
4464 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4465 /* When guessing was done, the hints should be already stripped away. */
4466 gcc_assert (!flag_guess_branch_prob
4467 || optimize == 0 || seen_error ());
4468 return target;
4469 }
4470
4471 /* Expand a call to __builtin_assume_aligned. We just return our first
4472 argument, as the builtin_assume_aligned semantics should already have
4473 been applied by CCP. */
4474
4475 static rtx
4476 expand_builtin_assume_aligned (tree exp, rtx target)
4477 {
4478 if (call_expr_nargs (exp) < 2)
4479 return const0_rtx;
4480 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4481 EXPAND_NORMAL);
4482 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4483 && (call_expr_nargs (exp) < 3
4484 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4485 return target;
4486 }
4487
4488 void
4489 expand_builtin_trap (void)
4490 {
4491 #ifdef HAVE_trap
4492 if (HAVE_trap)
4493 {
4494 rtx insn = emit_insn (gen_trap ());
4495 /* For trap insns when not accumulating outgoing args force
4496 REG_ARGS_SIZE note to prevent crossjumping of calls with
4497 different args sizes. */
4498 if (!ACCUMULATE_OUTGOING_ARGS)
4499 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4500 }
4501 else
4502 #endif
4503 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4504 emit_barrier ();
4505 }
4506
4507 /* Expand a call to __builtin_unreachable. We do nothing except emit
4508 a barrier saying that control flow will not pass here.
4509
4510 It is the responsibility of the program being compiled to ensure
4511 that control flow never reaches __builtin_unreachable. */
4512 static void
4513 expand_builtin_unreachable (void)
4514 {
4515 emit_barrier ();
4516 }
4517
4518 /* Expand EXP, a call to fabs, fabsf or fabsl.
4519 Return NULL_RTX if a normal call should be emitted rather than expanding
4520 the function inline. If convenient, the result should be placed
4521 in TARGET. SUBTARGET may be used as the target for computing
4522 the operand. */
4523
4524 static rtx
4525 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4526 {
4527 enum machine_mode mode;
4528 tree arg;
4529 rtx op0;
4530
4531 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4532 return NULL_RTX;
4533
4534 arg = CALL_EXPR_ARG (exp, 0);
4535 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4536 mode = TYPE_MODE (TREE_TYPE (arg));
4537 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4538 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4539 }
4540
4541 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4542 Return NULL_RTX if a normal call should be emitted rather than expanding the
4543 function inline. If convenient, the result should be placed in TARGET.
4544 SUBTARGET may be used as the target for computing the operand. */
4545
4546 static rtx
4547 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4548 {
4549 rtx op0, op1;
4550 tree arg;
4551
4552 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4553 return NULL_RTX;
4554
4555 arg = CALL_EXPR_ARG (exp, 0);
4556 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4557
4558 arg = CALL_EXPR_ARG (exp, 1);
4559 op1 = expand_normal (arg);
4560
4561 return expand_copysign (op0, op1, target);
4562 }
4563
4564 /* Create a new constant string literal and return a char* pointer to it.
4565 The STRING_CST value is the LEN characters at STR. */
4566 tree
4567 build_string_literal (int len, const char *str)
4568 {
4569 tree t, elem, index, type;
4570
4571 t = build_string (len, str);
4572 elem = build_type_variant (char_type_node, 1, 0);
4573 index = build_index_type (size_int (len - 1));
4574 type = build_array_type (elem, index);
4575 TREE_TYPE (t) = type;
4576 TREE_CONSTANT (t) = 1;
4577 TREE_READONLY (t) = 1;
4578 TREE_STATIC (t) = 1;
4579
4580 type = build_pointer_type (elem);
4581 t = build1 (ADDR_EXPR, type,
4582 build4 (ARRAY_REF, elem,
4583 t, integer_zero_node, NULL_TREE, NULL_TREE));
4584 return t;
4585 }
4586
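/* A typical use (a sketch), e.g. when folding printf into puts, is

     tree arg = build_string_literal (strlen ("hello") + 1, "hello");

   note that LEN counts the terminating NUL as well. */
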
4587 /* Expand a call to __builtin___clear_cache. */
4588
4589 static rtx
4590 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4591 {
4592 #ifndef HAVE_clear_cache
4593 #ifdef CLEAR_INSN_CACHE
4594 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4595 does something. Just do the default expansion to a call to
4596 __clear_cache(). */
4597 return NULL_RTX;
4598 #else
4599 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4600 does nothing. There is no need to call it. Do nothing. */
4601 return const0_rtx;
4602 #endif /* CLEAR_INSN_CACHE */
4603 #else
4604 /* We have a "clear_cache" insn, and it will handle everything. */
4605 tree begin, end;
4606 rtx begin_rtx, end_rtx;
4607
4608 /* We must not expand to a library call. If we did, any
4609 fallback library function in libgcc that might contain a call to
4610 __builtin___clear_cache() would recurse infinitely. */
4611 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4612 {
4613 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4614 return const0_rtx;
4615 }
4616
4617 if (HAVE_clear_cache)
4618 {
4619 struct expand_operand ops[2];
4620
4621 begin = CALL_EXPR_ARG (exp, 0);
4622 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4623
4624 end = CALL_EXPR_ARG (exp, 1);
4625 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4626
4627 create_address_operand (&ops[0], begin_rtx);
4628 create_address_operand (&ops[1], end_rtx);
4629 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4630 return const0_rtx;
4631 }
4632 return const0_rtx;
4633 #endif /* HAVE_clear_cache */
4634 }
4635
4636 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4637
4638 static rtx
4639 round_trampoline_addr (rtx tramp)
4640 {
4641 rtx temp, addend, mask;
4642
4643 /* If we don't need too much alignment, we'll have been guaranteed
4644 proper alignment by get_trampoline_type. */
4645 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4646 return tramp;
4647
4648 /* Round address up to desired boundary. */
4649 temp = gen_reg_rtx (Pmode);
4650 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4651 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4652
4653 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4654 temp, 0, OPTAB_LIB_WIDEN);
4655 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4656 temp, 0, OPTAB_LIB_WIDEN);
4657
4658 return tramp;
4659 }
4660
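/* The two binops above implement the usual round-up idiom (a sketch,
   with A == TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, a power of two):

     rounded = (tramp + (A - 1)) & -A;

   e.g. with A == 16, an address of 0x1003 rounds up to 0x1010. */
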
4661 static rtx
4662 expand_builtin_init_trampoline (tree exp, bool onstack)
4663 {
4664 tree t_tramp, t_func, t_chain;
4665 rtx m_tramp, r_tramp, r_chain, tmp;
4666
4667 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4668 POINTER_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 t_tramp = CALL_EXPR_ARG (exp, 0);
4672 t_func = CALL_EXPR_ARG (exp, 1);
4673 t_chain = CALL_EXPR_ARG (exp, 2);
4674
4675 r_tramp = expand_normal (t_tramp);
4676 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4677 MEM_NOTRAP_P (m_tramp) = 1;
4678
4679 /* If ONSTACK, the TRAMP argument should be the address of a field
4680 within the local function's FRAME decl. Either way, let's see if
4681 we can fill in the MEM_ATTRs for this memory. */
4682 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4683 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4684
4685 /* Creator of a heap trampoline is responsible for making sure the
4686 address is aligned to at least STACK_BOUNDARY. Normally malloc
4687 will ensure this anyhow. */
4688 tmp = round_trampoline_addr (r_tramp);
4689 if (tmp != r_tramp)
4690 {
4691 m_tramp = change_address (m_tramp, BLKmode, tmp);
4692 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4693 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4694 }
4695
4696 /* The FUNC argument should be the address of the nested function.
4697 Extract the actual function decl to pass to the hook. */
4698 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4699 t_func = TREE_OPERAND (t_func, 0);
4700 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4701
4702 r_chain = expand_normal (t_chain);
4703
4704 /* Generate insns to initialize the trampoline. */
4705 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4706
4707 if (onstack)
4708 {
4709 trampolines_created = 1;
4710
4711 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4712 "trampoline generated for nested function %qD", t_func);
4713 }
4714
4715 return const0_rtx;
4716 }
4717
4718 static rtx
4719 expand_builtin_adjust_trampoline (tree exp)
4720 {
4721 rtx tramp;
4722
4723 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4724 return NULL_RTX;
4725
4726 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4727 tramp = round_trampoline_addr (tramp);
4728 if (targetm.calls.trampoline_adjust_address)
4729 tramp = targetm.calls.trampoline_adjust_address (tramp);
4730
4731 return tramp;
4732 }
4733
4734 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4735 function. The function first checks whether the back end provides
4736 an insn to implement signbit for the respective mode. If not, it
4737 checks whether the floating point format of the value is such that
4738 the sign bit can be extracted. If that is not the case, the
4739 function returns NULL_RTX to indicate that a normal call should be
4740 emitted rather than expanding the function in-line. EXP is the
4741 expression that is a call to the builtin function; if convenient,
4742 the result should be placed in TARGET. */
4743 static rtx
4744 expand_builtin_signbit (tree exp, rtx target)
4745 {
4746 const struct real_format *fmt;
4747 enum machine_mode fmode, imode, rmode;
4748 tree arg;
4749 int word, bitpos;
4750 enum insn_code icode;
4751 rtx temp;
4752 location_t loc = EXPR_LOCATION (exp);
4753
4754 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4755 return NULL_RTX;
4756
4757 arg = CALL_EXPR_ARG (exp, 0);
4758 fmode = TYPE_MODE (TREE_TYPE (arg));
4759 rmode = TYPE_MODE (TREE_TYPE (exp));
4760 fmt = REAL_MODE_FORMAT (fmode);
4761
4762 arg = builtin_save_expr (arg);
4763
4764 /* Expand the argument yielding a RTX expression. */
4765 temp = expand_normal (arg);
4766
4767 /* Check if the back end provides an insn that handles signbit for the
4768 argument's mode. */
4769 icode = optab_handler (signbit_optab, fmode);
4770 if (icode != CODE_FOR_nothing)
4771 {
4772 rtx last = get_last_insn ();
4773 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4774 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4775 return target;
4776 delete_insns_since (last);
4777 }
4778
4779 /* For floating point formats without a sign bit, implement signbit
4780 as "ARG < 0.0". */
4781 bitpos = fmt->signbit_ro;
4782 if (bitpos < 0)
4783 {
4784 /* But we can't do this if the format supports signed zero. */
4785 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4786 return NULL_RTX;
4787
4788 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4789 build_real (TREE_TYPE (arg), dconst0));
4790 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4791 }
4792
4793 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4794 {
4795 imode = int_mode_for_mode (fmode);
4796 if (imode == BLKmode)
4797 return NULL_RTX;
4798 temp = gen_lowpart (imode, temp);
4799 }
4800 else
4801 {
4802 imode = word_mode;
4803 /* Handle targets with different FP word orders. */
4804 if (FLOAT_WORDS_BIG_ENDIAN)
4805 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4806 else
4807 word = bitpos / BITS_PER_WORD;
4808 temp = operand_subword_force (temp, word, fmode);
4809 bitpos = bitpos % BITS_PER_WORD;
4810 }
4811
4812 /* Force the intermediate word_mode (or narrower) result into a
4813 register. This avoids attempting to create paradoxical SUBREGs
4814 of floating point modes below. */
4815 temp = force_reg (imode, temp);
4816
4817 /* If the bitpos is within the "result mode" lowpart, the operation
4818 can be implemented with a single bitwise AND. Otherwise, we need
4819 a right shift and an AND. */
4820
4821 if (bitpos < GET_MODE_BITSIZE (rmode))
4822 {
4823 double_int mask = double_int_zero.set_bit (bitpos);
4824
4825 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4826 temp = gen_lowpart (rmode, temp);
4827 temp = expand_binop (rmode, and_optab, temp,
4828 immed_double_int_const (mask, rmode),
4829 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4830 }
4831 else
4832 {
4833 /* Perform a logical right shift to place the signbit in the least
4834 significant bit, then truncate the result to the desired mode
4835 and mask just this bit. */
4836 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4837 temp = gen_lowpart (rmode, temp);
4838 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4839 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4840 }
4841
4842 return temp;
4843 }
4844
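/* As a worked example (a sketch): for IEEE double on a 64-bit target
   with a 32-bit result mode, fmt->signbit_ro is 63, which is outside
   the rmode lowpart, so the shift-then-mask branch computes

     (uint32_t) ((uint64_t) bits >> 63) & 1;

   giving 1 for negative arguments, including -0.0, and 0 otherwise. */
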
4845 /* Expand fork or exec calls. TARGET is the desired target of the
4846 call. EXP is the call. FN is the declaration
4847 of the actual function. IGNORE is nonzero if the
4848 value is to be ignored. */
4849
4850 static rtx
4851 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4852 {
4853 tree id, decl;
4854 tree call;
4855
4856 /* If we are not profiling, just call the function. */
4857 if (!profile_arc_flag)
4858 return NULL_RTX;
4859
4860 /* Otherwise call the wrapper. This should be equivalent for the rest of
4861 compiler, so the code does not diverge, and the wrapper may run the
4862 code necessary for keeping the profiling sane. */
4863
4864 switch (DECL_FUNCTION_CODE (fn))
4865 {
4866 case BUILT_IN_FORK:
4867 id = get_identifier ("__gcov_fork");
4868 break;
4869
4870 case BUILT_IN_EXECL:
4871 id = get_identifier ("__gcov_execl");
4872 break;
4873
4874 case BUILT_IN_EXECV:
4875 id = get_identifier ("__gcov_execv");
4876 break;
4877
4878 case BUILT_IN_EXECLP:
4879 id = get_identifier ("__gcov_execlp");
4880 break;
4881
4882 case BUILT_IN_EXECLE:
4883 id = get_identifier ("__gcov_execle");
4884 break;
4885
4886 case BUILT_IN_EXECVP:
4887 id = get_identifier ("__gcov_execvp");
4888 break;
4889
4890 case BUILT_IN_EXECVE:
4891 id = get_identifier ("__gcov_execve");
4892 break;
4893
4894 default:
4895 gcc_unreachable ();
4896 }
4897
4898 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4899 FUNCTION_DECL, id, TREE_TYPE (fn));
4900 DECL_EXTERNAL (decl) = 1;
4901 TREE_PUBLIC (decl) = 1;
4902 DECL_ARTIFICIAL (decl) = 1;
4903 TREE_NOTHROW (decl) = 1;
4904 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4905 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4906 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4907 return expand_call (call, target, ignore);
4908 }
4909
4910
4911 \f
4912 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4913 the pointer in these functions is void*, the tree optimizers may remove
4914 casts. The mode computed in expand_builtin isn't reliable either, due
4915 to __sync_bool_compare_and_swap.
4916
4917 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4918 group of builtins. This gives us log2 of the mode size. */
4919
4920 static inline enum machine_mode
4921 get_builtin_sync_mode (int fcode_diff)
4922 {
4923 /* The size is not negotiable, so ask not to get BLKmode in return
4924 if the target indicates that a smaller size would be better. */
4925 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4926 }
4927
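/* For example (a sketch, assuming BITS_PER_UNIT == 8):
   BUILT_IN_SYNC_FETCH_AND_ADD_4 minus the _1 variant gives
   FCODE_DIFF == 2, so the mode is mode_for_size (8 << 2, MODE_INT, 0)
   == SImode; the _1, _2, _8 and _16 variants map to QImode, HImode,
   DImode and TImode in the same way. */
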
4928 /* Expand the memory expression LOC and return the appropriate memory operand
4929 for the builtin_sync operations. */
4930
4931 static rtx
4932 get_builtin_sync_mem (tree loc, enum machine_mode mode)
4933 {
4934 rtx addr, mem;
4935
4936 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4937 addr = convert_memory_address (Pmode, addr);
4938
4939 /* Note that we explicitly do not want any alias information for this
4940 memory, so that we kill all other live memories. Otherwise we don't
4941 satisfy the full barrier semantics of the intrinsic. */
4942 mem = validize_mem (gen_rtx_MEM (mode, addr));
4943
4944 /* The alignment needs to be at least that of the mode. */
4945 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4946 get_pointer_alignment (loc)));
4947 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4948 MEM_VOLATILE_P (mem) = 1;
4949
4950 return mem;
4951 }
4952
4953 /* Make sure an argument is in the right mode.
4954 EXP is the tree argument.
4955 MODE is the mode it should be in. */
4956
4957 static rtx
4958 expand_expr_force_mode (tree exp, enum machine_mode mode)
4959 {
4960 rtx val;
4961 enum machine_mode old_mode;
4962
4963 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4964 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4965 of CONST_INTs, where we know the old_mode only from the call argument. */
4966
4967 old_mode = GET_MODE (val);
4968 if (old_mode == VOIDmode)
4969 old_mode = TYPE_MODE (TREE_TYPE (exp));
4970 val = convert_modes (mode, old_mode, val, 1);
4971 return val;
4972 }
4973
4974
4975 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4976 EXP is the CALL_EXPR. CODE is the rtx code
4977 that corresponds to the arithmetic or logical operation from the name;
4978 an exception here is that NOT actually means NAND. TARGET is an optional
4979 place for us to store the results; AFTER is true if this is the
4980 fetch_and_xxx form. */
4981
4982 static rtx
4983 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
4984 enum rtx_code code, bool after,
4985 rtx target)
4986 {
4987 rtx val, mem;
4988 location_t loc = EXPR_LOCATION (exp);
4989
4990 if (code == NOT && warn_sync_nand)
4991 {
4992 tree fndecl = get_callee_fndecl (exp);
4993 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4994
4995 static bool warned_f_a_n, warned_n_a_f;
4996
4997 switch (fcode)
4998 {
4999 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5000 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5001 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5002 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5003 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5004 if (warned_f_a_n)
5005 break;
5006
5007 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5008 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5009 warned_f_a_n = true;
5010 break;
5011
5012 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5013 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5014 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5015 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5016 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5017 if (warned_n_a_f)
5018 break;
5019
5020 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5021 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5022 warned_n_a_f = true;
5023 break;
5024
5025 default:
5026 gcc_unreachable ();
5027 }
5028 }
5029
5030 /* Expand the operands. */
5031 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5032 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5033
5034 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5035 after);
5036 }
5037
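/* The semantic change warned about above, concretely (a sketch):

     old = __sync_fetch_and_nand (p, v);

   stored *p = ~*p & v before GCC 4.4, but stores the true NAND
   *p = ~(*p & v) in GCC 4.4 and later; both return the old *p. */
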
5038 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5040 true if this is the boolean form. TARGET is a place for us to store the
5041 results; this is NOT optional if IS_BOOL is true. */
5042
5043 static rtx
5044 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5045 bool is_bool, rtx target)
5046 {
5047 rtx old_val, new_val, mem;
5048 rtx *pbool, *poval;
5049
5050 /* Expand the operands. */
5051 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5052 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5053 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5054
5055 pbool = poval = NULL;
5056 if (target != const0_rtx)
5057 {
5058 if (is_bool)
5059 pbool = &target;
5060 else
5061 poval = &target;
5062 }
5063 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5064 false, MEMMODEL_SEQ_CST,
5065 MEMMODEL_SEQ_CST))
5066 return NULL_RTX;
5067
5068 return target;
5069 }
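/* Illustrative usage of the two forms (a sketch, not part of GCC;
   `lock' and do_work are hypothetical):

     static int lock;

     // Value form: returns the prior contents of the location.
     int prev = __sync_val_compare_and_swap (&lock, 0, 1);

     // Boolean form: returns nonzero iff the swap happened.
     if (__sync_bool_compare_and_swap (&lock, 0, 1))
       do_work ();

   Both expand to a MEMMODEL_SEQ_CST compare-and-swap via
   expand_atomic_compare_and_swap above.  */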
5070
5071 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5072 general form is actually an atomic exchange, and some targets only
5073 support a reduced form with the second argument being a constant 1.
5074 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5075 the results. */
5076
5077 static rtx
5078 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5079 rtx target)
5080 {
5081 rtx val, mem;
5082
5083 /* Expand the operands. */
5084 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5085 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5086
5087 return expand_sync_lock_test_and_set (target, mem, val);
5088 }
5089
5090 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5091
5092 static void
5093 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5094 {
5095 rtx mem;
5096
5097 /* Expand the operands. */
5098 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5099
5100 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5101 }
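/* The two builtins above pair into the classic spinlock idiom (a sketch,
   not part of GCC; `lock' and do_work are hypothetical):

     static int lock;

     while (__sync_lock_test_and_set (&lock, 1))  // acquire (exchange)
       ;                                          // spin while held
     do_work ();                                  // critical section
     __sync_lock_release (&lock);                 // release

   As expanded above, the release is an atomic store of zero with
   MEMMODEL_RELEASE semantics.  */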
5102
5103 /* Given an integer representing an ``enum memmodel'', verify its
5104 correctness and return the memory model enum. */
5105
5106 static enum memmodel
5107 get_memmodel (tree exp)
5108 {
5109 rtx op;
5110 unsigned HOST_WIDE_INT val;
5111
5112 /* If the parameter is not a constant, it's a run-time value, so we'll
5113 just treat it as MEMMODEL_SEQ_CST to avoid annoying run-time checks. */
5114 if (TREE_CODE (exp) != INTEGER_CST)
5115 return MEMMODEL_SEQ_CST;
5116
5117 op = expand_normal (exp);
5118
5119 val = INTVAL (op);
5120 if (targetm.memmodel_check)
5121 val = targetm.memmodel_check (val);
5122 else if (val & ~MEMMODEL_MASK)
5123 {
5124 warning (OPT_Winvalid_memory_model,
5125 "Unknown architecture specifier in memory model to builtin.");
5126 return MEMMODEL_SEQ_CST;
5127 }
5128
5129 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5130 {
5131 warning (OPT_Winvalid_memory_model,
5132 "invalid memory model argument to builtin");
5133 return MEMMODEL_SEQ_CST;
5134 }
5135
5136 return (enum memmodel) val;
5137 }
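/* At the source level the argument is one of the __ATOMIC_* constants;
   anything get_memmodel cannot see as a compile-time constant is
   deliberately treated as the strongest model (a sketch, not part of
   GCC; `x' and `m' are hypothetical):

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);  // constant: checked
     int w = __atomic_load_n (&x, m);                 // run time: SEQ_CST

   Targets may encode extra bits beyond MEMMODEL_MASK, which is why
   targetm.memmodel_check gets the first look at the value.  */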
5138
5139 /* Expand the __atomic_exchange intrinsic:
5140 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5141 EXP is the CALL_EXPR.
5142 TARGET is an optional place for us to store the results. */
5143
5144 static rtx
5145 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5146 {
5147 rtx val, mem;
5148 enum memmodel model;
5149
5150 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5151 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5152 {
5153 error ("invalid memory model for %<__atomic_exchange%>");
5154 return NULL_RTX;
5155 }
5156
5157 if (!flag_inline_atomics)
5158 return NULL_RTX;
5159
5160 /* Expand the operands. */
5161 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5162 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5163
5164 return expand_atomic_exchange (target, mem, val, model);
5165 }
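/* Illustrative usage (a sketch, not part of GCC; `flag' is
   hypothetical); the _n form shown corresponds to the sized builtins
   expanded here:

     static int flag;
     int prev = __atomic_exchange_n (&flag, 1, __ATOMIC_ACQ_REL);

   Per the check above, __ATOMIC_CONSUME is the one model rejected for
   an exchange.  */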
5166
5167 /* Expand the __atomic_compare_exchange intrinsic:
5168 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5169 TYPE desired, BOOL weak,
5170 enum memmodel success,
5171 enum memmodel failure)
5172 EXP is the CALL_EXPR.
5173 TARGET is an optional place for us to store the results. */
5174
5175 static rtx
5176 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5177 rtx target)
5178 {
5179 rtx expect, desired, mem, oldval;
5180 enum memmodel success, failure;
5181 tree weak;
5182 bool is_weak;
5183
5184 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5185 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5186
5187 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5188 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5189 {
5190 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5191 return NULL_RTX;
5192 }
5193
5194 if (failure > success)
5195 {
5196 error ("failure memory model cannot be stronger than success "
5197 "memory model for %<__atomic_compare_exchange%>");
5198 return NULL_RTX;
5199 }
5200
5201 if (!flag_inline_atomics)
5202 return NULL_RTX;
5203
5204 /* Expand the operands. */
5205 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5206
5207 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5208 expect = convert_memory_address (Pmode, expect);
5209 expect = gen_rtx_MEM (mode, expect);
5210 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5211
5212 weak = CALL_EXPR_ARG (exp, 3);
5213 is_weak = false;
5214 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5215 is_weak = true;
5216
5217 oldval = expect;
5218 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5219 &oldval, mem, oldval, desired,
5220 is_weak, success, failure))
5221 return NULL_RTX;
5222
5223 if (oldval != expect)
5224 emit_move_insn (expect, oldval);
5225
5226 return target;
5227 }
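/* The expected value is passed by pointer and refreshed on failure,
   which is what makes the classic retry loop work (a sketch, not part
   of GCC; `counter' and `add_one' are hypothetical):

     static int counter;

     void
     add_one (void)
     {
       int old = __atomic_load_n (&counter, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (&counter, &old, old + 1,
                                            1, // weak: may fail spuriously
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ; // on failure, OLD now holds the observed value
     }

   The emit_move_insn above is what writes the observed value back
   through the `expect' pointer.  */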
5228
5229 /* Expand the __atomic_load intrinsic:
5230 TYPE __atomic_load (TYPE *object, enum memmodel)
5231 EXP is the CALL_EXPR.
5232 TARGET is an optional place for us to store the results. */
5233
5234 static rtx
5235 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5236 {
5237 rtx mem;
5238 enum memmodel model;
5239
5240 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5241 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5242 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5243 {
5244 error ("invalid memory model for %<__atomic_load%>");
5245 return NULL_RTX;
5246 }
5247
5248 if (!flag_inline_atomics)
5249 return NULL_RTX;
5250
5251 /* Expand the operand. */
5252 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5253
5254 return expand_atomic_load (target, mem, model);
5255 }
5256
5257
5258 /* Expand the __atomic_store intrinsic:
5259 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5260 EXP is the CALL_EXPR.
5261 TARGET is an optional place for us to store the results. */
5262
5263 static rtx
5264 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5265 {
5266 rtx mem, val;
5267 enum memmodel model;
5268
5269 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5270 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5271 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5272 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5273 {
5274 error ("invalid memory model for %<__atomic_store%>");
5275 return NULL_RTX;
5276 }
5277
5278 if (!flag_inline_atomics)
5279 return NULL_RTX;
5280
5281 /* Expand the operands. */
5282 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5283 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5284
5285 return expand_atomic_store (mem, val, model, false);
5286 }
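/* Illustrative usage of the load/store pair (a sketch, not part of GCC;
   `ready' is hypothetical):

     static int ready;

     __atomic_store_n (&ready, 1, __ATOMIC_RELEASE);      // writer side
     while (!__atomic_load_n (&ready, __ATOMIC_ACQUIRE))  // reader side
       ;

   The model checks above mirror the C11 rules: a load may not be
   RELEASE or ACQ_REL, and a store may only be RELAXED, RELEASE or
   SEQ_CST.  */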
5287
5288 /* Expand the __atomic_fetch_XXX intrinsic:
5289 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5290 EXP is the CALL_EXPR.
5291 TARGET is an optional place for us to store the results.
5292 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5293 FETCH_AFTER is true if returning the result of the operation,
5294 false if returning the value before the operation.
5295 IGNORE is true if the result is not used.
5296 EXT_CALL is the correct builtin for an external call if this cannot be
5297 resolved to an instruction sequence. */
5298
5299 static rtx
5300 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5301 enum rtx_code code, bool fetch_after,
5302 bool ignore, enum built_in_function ext_call)
5303 {
5304 rtx val, mem, ret;
5305 enum memmodel model;
5306 tree fndecl;
5307 tree addr;
5308
5309 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5310
5311 /* Expand the operands. */
5312 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5313 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5314
5315 /* Only try generating instructions if inlining is turned on. */
5316 if (flag_inline_atomics)
5317 {
5318 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5319 if (ret)
5320 return ret;
5321 }
5322
5323 /* If there is no library routine to fall back on, give up. */
5324 if (ext_call == BUILT_IN_NONE)
5325 return NULL_RTX;
5326
5327 /* Change the call to the specified function. */
5328 fndecl = get_callee_fndecl (exp);
5329 addr = CALL_EXPR_FN (exp);
5330 STRIP_NOPS (addr);
5331
5332 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5333 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5334
5335 /* Expand the call here so we can emit trailing code. */
5336 ret = expand_call (exp, target, ignore);
5337
5338 /* Replace the original function just in case it matters. */
5339 TREE_OPERAND (addr, 0) = fndecl;
5340
5341 /* Then issue the arithmetic correction to return the right result. */
5342 if (!ignore)
5343 {
5344 if (code == NOT)
5345 {
5346 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5347 OPTAB_LIB_WIDEN);
5348 ret = expand_simple_unop (mode, NOT, ret, target, true);
5349 }
5350 else
5351 ret = expand_simple_binop (mode, code, ret, val, target, true,
5352 OPTAB_LIB_WIDEN);
5353 }
5354 return ret;
5355 }
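/* A worked example of the trailing correction above (illustrative
   only): the external fetch-style routine returns the value *before*
   the operation, so when the user called an OP_fetch form we recompute

     result = ret OP val                     // the generic case

   and for the NAND forms, whose result is ~(*ptr & val),

     result = ~(ret & val)                   // the CODE == NOT case

   which is exactly the expand_simple_binop / expand_simple_unop pair
   emitted when CODE is NOT.  */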
5356
5357
5358 #ifndef HAVE_atomic_clear
5359 # define HAVE_atomic_clear 0
5360 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5361 #endif
5362
5363 /* Expand an atomic clear operation.
5364 void __atomic_clear (BOOL *obj, enum memmodel)
5365 EXP is the call expression. */
5366
5367 static rtx
5368 expand_builtin_atomic_clear (tree exp)
5369 {
5370 enum machine_mode mode;
5371 rtx mem, ret;
5372 enum memmodel model;
5373
5374 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5377
5378 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5379 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5380 {
5381 error ("invalid memory model for %<__atomic_clear%>");
5382 return const0_rtx;
5383 }
5384
5385 if (HAVE_atomic_clear)
5386 {
5387 emit_insn (gen_atomic_clear (mem, model));
5388 return const0_rtx;
5389 }
5390
5391 /* Try issuing an atomic store, which falls back internally to a
5392 __sync_lock_release libcall. The only way this can fail is if the bool
5393 type is larger than a word; unlikely, but handle it anyway for
5394 completeness. In that case assume a single-threaded model, since there
5395 is no atomic support and hence no barriers are required. */
5396 ret = expand_atomic_store (mem, const0_rtx, model, true);
5397 if (!ret)
5398 emit_move_insn (mem, const0_rtx);
5399 return const0_rtx;
5400 }
5401
5402 /* Expand an atomic test_and_set operation.
5403 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5404 EXP is the call expression. */
5405
5406 static rtx
5407 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5408 {
5409 rtx mem;
5410 enum memmodel model;
5411 enum machine_mode mode;
5412
5413 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5414 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5415 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5416
5417 return expand_atomic_test_and_set (target, mem, model);
5418 }
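/* The pair above forms the minimal flag idiom (a sketch, not part of
   GCC; `guard' and do_work are hypothetical):

     static _Bool guard;

     while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
       ;                                           // spin until we set it
     do_work ();                                   // critical section
     __atomic_clear (&guard, __ATOMIC_RELEASE);    // store false

   Note that the expander above imposes no model check on test_and_set,
   while clear rejects ACQUIRE and ACQ_REL since it is a pure store.  */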
5419
5420
5421 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5422 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5423
5424 static tree
5425 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5426 {
5427 int size;
5428 enum machine_mode mode;
5429 unsigned int mode_align, type_align;
5430
5431 if (TREE_CODE (arg0) != INTEGER_CST)
5432 return NULL_TREE;
5433
5434 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5435 mode = mode_for_size (size, MODE_INT, 0);
5436 mode_align = GET_MODE_ALIGNMENT (mode);
5437
5438 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5439 type_align = mode_align;
5440 else
5441 {
5442 tree ttype = TREE_TYPE (arg1);
5443
5444 /* This function is usually invoked and folded immediately by the front
5445 end before anything else has a chance to look at it. The pointer
5446 parameter at this point is usually cast to a void *, so check for that
5447 and look past the cast. */
5448 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5449 && VOID_TYPE_P (TREE_TYPE (ttype)))
5450 arg1 = TREE_OPERAND (arg1, 0);
5451
5452 ttype = TREE_TYPE (arg1);
5453 gcc_assert (POINTER_TYPE_P (ttype));
5454
5455 /* Get the underlying type of the object. */
5456 ttype = TREE_TYPE (ttype);
5457 type_align = TYPE_ALIGN (ttype);
5458 }
5459
5460 /* If the object has smaller alignment, the lock free routines cannot
5461 be used. */
5462 if (type_align < mode_align)
5463 return boolean_false_node;
5464
5465 /* Check if a compare_and_swap pattern exists for the mode which represents
5466 the required size. The pattern is not allowed to fail, so the existence
5467 of the pattern indicates support is present. */
5468 if (can_compare_and_swap_p (mode, true))
5469 return boolean_true_node;
5470 else
5471 return boolean_false_node;
5472 }
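/* Illustrative folding behaviour (a sketch, not part of GCC): on a
   target with a compare-and-swap pattern for SImode,

     __atomic_always_lock_free (sizeof (int), 0)   // folds to true; 0
                                                   // means "use typical
                                                   // alignment"

   while passing the address of an under-aligned object can make the
   same query fold to false via the type_align test above.  */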
5473
5474 /* Return true if the parameters to call EXP represent an object which will
5475 always generate lock free instructions. The first argument represents the
5476 size of the object, and the second parameter is a pointer to the object
5477 itself. If NULL is passed for the object, then the result is based on
5478 typical alignment for an object of the specified size. Otherwise return
5479 false. */
5480
5481 static rtx
5482 expand_builtin_atomic_always_lock_free (tree exp)
5483 {
5484 tree size;
5485 tree arg0 = CALL_EXPR_ARG (exp, 0);
5486 tree arg1 = CALL_EXPR_ARG (exp, 1);
5487
5488 if (TREE_CODE (arg0) != INTEGER_CST)
5489 {
5490 error ("non-constant argument 1 to __atomic_always_lock_free");
5491 return const0_rtx;
5492 }
5493
5494 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5495 if (size == boolean_true_node)
5496 return const1_rtx;
5497 return const0_rtx;
5498 }
5499
5500 /* Return boolean_true_node if it can be determined that object ARG1 of
5501 size ARG0 is always lock free on this architecture; else NULL_TREE. */
5502
5503 static tree
5504 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5505 {
5506 if (!flag_inline_atomics)
5507 return NULL_TREE;
5508
5509 /* If it is always lock free, the answer is true; otherwise produce no compile-time result. */
5510 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5511 return boolean_true_node;
5512
5513 return NULL_TREE;
5514 }
5515
5516 /* Return true if the parameters to call EXP represent an object which will
5517 always generate lock free instructions. The first argument represents the
5518 size of the object, and the second parameter is a pointer to the object
5519 itself. If NULL is passed for the object, then the result is based on
5520 typical alignment for an object of the specified size. Otherwise return
5521 NULL. */
5522
5523 static rtx
5524 expand_builtin_atomic_is_lock_free (tree exp)
5525 {
5526 tree size;
5527 tree arg0 = CALL_EXPR_ARG (exp, 0);
5528 tree arg1 = CALL_EXPR_ARG (exp, 1);
5529
5530 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5531 {
5532 error ("non-integer argument 1 to __atomic_is_lock_free");
5533 return NULL_RTX;
5534 }
5535
5536 if (!flag_inline_atomics)
5537 return NULL_RTX;
5538
5539 /* If the value is known at compile time, return the RTX for it. */
5540 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5541 if (size == boolean_true_node)
5542 return const1_rtx;
5543
5544 return NULL_RTX;
5545 }
5546
5547 /* Expand the __atomic_thread_fence intrinsic:
5548 void __atomic_thread_fence (enum memmodel)
5549 EXP is the CALL_EXPR. */
5550
5551 static void
5552 expand_builtin_atomic_thread_fence (tree exp)
5553 {
5554 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5555 expand_mem_thread_fence (model);
5556 }
5557
5558 /* Expand the __atomic_signal_fence intrinsic:
5559 void __atomic_signal_fence (enum memmodel)
5560 EXP is the CALL_EXPR. */
5561
5562 static void
5563 expand_builtin_atomic_signal_fence (tree exp)
5564 {
5565 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5566 expand_mem_signal_fence (model);
5567 }
5568
5569 /* Expand the __sync_synchronize intrinsic. */
5570
5571 static void
5572 expand_builtin_sync_synchronize (void)
5573 {
5574 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5575 }
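/* The three fences side by side (illustrative only):

     __atomic_thread_fence (__ATOMIC_ACQUIRE);  // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler-only fence
     __sync_synchronize ();                     // legacy full barrier

   As expanded above, the legacy barrier is simply a thread fence with
   the strongest model, MEMMODEL_SEQ_CST.  */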
5576
5577 static rtx
5578 expand_builtin_thread_pointer (tree exp, rtx target)
5579 {
5580 enum insn_code icode;
5581 if (!validate_arglist (exp, VOID_TYPE))
5582 return const0_rtx;
5583 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5584 if (icode != CODE_FOR_nothing)
5585 {
5586 struct expand_operand op;
5587 if (!REG_P (target) || GET_MODE (target) != Pmode)
5588 target = gen_reg_rtx (Pmode);
5589 create_output_operand (&op, target, Pmode);
5590 expand_insn (icode, 1, &op);
5591 return target;
5592 }
5593 error ("__builtin_thread_pointer is not supported on this target");
5594 return const0_rtx;
5595 }
5596
5597 static void
5598 expand_builtin_set_thread_pointer (tree exp)
5599 {
5600 enum insn_code icode;
5601 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5602 return;
5603 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5604 if (icode != CODE_FOR_nothing)
5605 {
5606 struct expand_operand op;
5607 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5608 Pmode, EXPAND_NORMAL);
5609 create_input_operand (&op, val, Pmode);
5610 expand_insn (icode, 1, &op);
5611 return;
5612 }
5613 error ("__builtin_set_thread_pointer is not supported on this target");
5614 }
5615
5616 \f
5617 /* Expand an expression EXP that calls a built-in function,
5618 with result going to TARGET if that's convenient
5619 (and in mode MODE if that's convenient).
5620 SUBTARGET may be used as the target for computing one of EXP's operands.
5621 IGNORE is nonzero if the value is to be ignored. */
5622
5623 rtx
5624 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5625 int ignore)
5626 {
5627 tree fndecl = get_callee_fndecl (exp);
5628 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5629 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5630 int flags;
5631
5632 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5633 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5634
5635 /* When not optimizing, generate calls to library functions for a certain
5636 set of builtins. */
5637 if (!optimize
5638 && !called_as_built_in (fndecl)
5639 && fcode != BUILT_IN_FORK
5640 && fcode != BUILT_IN_EXECL
5641 && fcode != BUILT_IN_EXECV
5642 && fcode != BUILT_IN_EXECLP
5643 && fcode != BUILT_IN_EXECLE
5644 && fcode != BUILT_IN_EXECVP
5645 && fcode != BUILT_IN_EXECVE
5646 && fcode != BUILT_IN_ALLOCA
5647 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5648 && fcode != BUILT_IN_FREE)
5649 return expand_call (exp, target, ignore);
5650
5651 /* The built-in function expanders test for target == const0_rtx
5652 to determine whether the function's result will be ignored. */
5653 if (ignore)
5654 target = const0_rtx;
5655
5656 /* If the result of a pure or const built-in function is ignored, and
5657 none of its arguments are volatile, we can avoid expanding the
5658 built-in call and just evaluate the arguments for side-effects. */
5659 if (target == const0_rtx
5660 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5661 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5662 {
5663 bool volatilep = false;
5664 tree arg;
5665 call_expr_arg_iterator iter;
5666
5667 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5668 if (TREE_THIS_VOLATILE (arg))
5669 {
5670 volatilep = true;
5671 break;
5672 }
5673
5674 if (! volatilep)
5675 {
5676 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5677 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5678 return const0_rtx;
5679 }
5680 }
5681
5682 switch (fcode)
5683 {
5684 CASE_FLT_FN (BUILT_IN_FABS):
5685 case BUILT_IN_FABSD32:
5686 case BUILT_IN_FABSD64:
5687 case BUILT_IN_FABSD128:
5688 target = expand_builtin_fabs (exp, target, subtarget);
5689 if (target)
5690 return target;
5691 break;
5692
5693 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5694 target = expand_builtin_copysign (exp, target, subtarget);
5695 if (target)
5696 return target;
5697 break;
5698
5699 /* Just do a normal library call if we were unable to fold
5700 the values. */
5701 CASE_FLT_FN (BUILT_IN_CABS):
5702 break;
5703
5704 CASE_FLT_FN (BUILT_IN_EXP):
5705 CASE_FLT_FN (BUILT_IN_EXP10):
5706 CASE_FLT_FN (BUILT_IN_POW10):
5707 CASE_FLT_FN (BUILT_IN_EXP2):
5708 CASE_FLT_FN (BUILT_IN_EXPM1):
5709 CASE_FLT_FN (BUILT_IN_LOGB):
5710 CASE_FLT_FN (BUILT_IN_LOG):
5711 CASE_FLT_FN (BUILT_IN_LOG10):
5712 CASE_FLT_FN (BUILT_IN_LOG2):
5713 CASE_FLT_FN (BUILT_IN_LOG1P):
5714 CASE_FLT_FN (BUILT_IN_TAN):
5715 CASE_FLT_FN (BUILT_IN_ASIN):
5716 CASE_FLT_FN (BUILT_IN_ACOS):
5717 CASE_FLT_FN (BUILT_IN_ATAN):
5718 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5719 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5720 because of possible accuracy problems. */
5721 if (! flag_unsafe_math_optimizations)
5722 break;
5723 CASE_FLT_FN (BUILT_IN_SQRT):
5724 CASE_FLT_FN (BUILT_IN_FLOOR):
5725 CASE_FLT_FN (BUILT_IN_CEIL):
5726 CASE_FLT_FN (BUILT_IN_TRUNC):
5727 CASE_FLT_FN (BUILT_IN_ROUND):
5728 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5729 CASE_FLT_FN (BUILT_IN_RINT):
5730 target = expand_builtin_mathfn (exp, target, subtarget);
5731 if (target)
5732 return target;
5733 break;
5734
5735 CASE_FLT_FN (BUILT_IN_FMA):
5736 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5737 if (target)
5738 return target;
5739 break;
5740
5741 CASE_FLT_FN (BUILT_IN_ILOGB):
5742 if (! flag_unsafe_math_optimizations)
5743 break;
5744 CASE_FLT_FN (BUILT_IN_ISINF):
5745 CASE_FLT_FN (BUILT_IN_FINITE):
5746 case BUILT_IN_ISFINITE:
5747 case BUILT_IN_ISNORMAL:
5748 target = expand_builtin_interclass_mathfn (exp, target);
5749 if (target)
5750 return target;
5751 break;
5752
5753 CASE_FLT_FN (BUILT_IN_ICEIL):
5754 CASE_FLT_FN (BUILT_IN_LCEIL):
5755 CASE_FLT_FN (BUILT_IN_LLCEIL):
5756 CASE_FLT_FN (BUILT_IN_LFLOOR):
5757 CASE_FLT_FN (BUILT_IN_IFLOOR):
5758 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5759 target = expand_builtin_int_roundingfn (exp, target);
5760 if (target)
5761 return target;
5762 break;
5763
5764 CASE_FLT_FN (BUILT_IN_IRINT):
5765 CASE_FLT_FN (BUILT_IN_LRINT):
5766 CASE_FLT_FN (BUILT_IN_LLRINT):
5767 CASE_FLT_FN (BUILT_IN_IROUND):
5768 CASE_FLT_FN (BUILT_IN_LROUND):
5769 CASE_FLT_FN (BUILT_IN_LLROUND):
5770 target = expand_builtin_int_roundingfn_2 (exp, target);
5771 if (target)
5772 return target;
5773 break;
5774
5775 CASE_FLT_FN (BUILT_IN_POWI):
5776 target = expand_builtin_powi (exp, target);
5777 if (target)
5778 return target;
5779 break;
5780
5781 CASE_FLT_FN (BUILT_IN_ATAN2):
5782 CASE_FLT_FN (BUILT_IN_LDEXP):
5783 CASE_FLT_FN (BUILT_IN_SCALB):
5784 CASE_FLT_FN (BUILT_IN_SCALBN):
5785 CASE_FLT_FN (BUILT_IN_SCALBLN):
5786 if (! flag_unsafe_math_optimizations)
5787 break;
5788
5789 CASE_FLT_FN (BUILT_IN_FMOD):
5790 CASE_FLT_FN (BUILT_IN_REMAINDER):
5791 CASE_FLT_FN (BUILT_IN_DREM):
5792 CASE_FLT_FN (BUILT_IN_POW):
5793 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5794 if (target)
5795 return target;
5796 break;
5797
5798 CASE_FLT_FN (BUILT_IN_CEXPI):
5799 target = expand_builtin_cexpi (exp, target);
5800 gcc_assert (target);
5801 return target;
5802
5803 CASE_FLT_FN (BUILT_IN_SIN):
5804 CASE_FLT_FN (BUILT_IN_COS):
5805 if (! flag_unsafe_math_optimizations)
5806 break;
5807 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5808 if (target)
5809 return target;
5810 break;
5811
5812 CASE_FLT_FN (BUILT_IN_SINCOS):
5813 if (! flag_unsafe_math_optimizations)
5814 break;
5815 target = expand_builtin_sincos (exp);
5816 if (target)
5817 return target;
5818 break;
5819
5820 case BUILT_IN_APPLY_ARGS:
5821 return expand_builtin_apply_args ();
5822
5823 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5824 FUNCTION with a copy of the parameters described by
5825 ARGUMENTS, and ARGSIZE. It returns a block of memory
5826 allocated on the stack into which is stored all the registers
5827 that might possibly be used for returning the result of a
5828 function. ARGUMENTS is the value returned by
5829 __builtin_apply_args. ARGSIZE is the number of bytes of
5830 arguments that must be copied. ??? How should this value be
5831 computed? We'll also need a safe worst case value for varargs
5832 functions. */
5833 case BUILT_IN_APPLY:
5834 if (!validate_arglist (exp, POINTER_TYPE,
5835 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5836 && !validate_arglist (exp, REFERENCE_TYPE,
5837 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5838 return const0_rtx;
5839 else
5840 {
5841 rtx ops[3];
5842
5843 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5844 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5845 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5846
5847 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5848 }
5849
5850 /* __builtin_return (RESULT) causes the function to return the
5851 value described by RESULT. RESULT is address of the block of
5852 memory returned by __builtin_apply. */
5853 case BUILT_IN_RETURN:
5854 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5855 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5856 return const0_rtx;
5857
5858 case BUILT_IN_SAVEREGS:
5859 return expand_builtin_saveregs ();
5860
5861 case BUILT_IN_VA_ARG_PACK:
5862 /* All valid uses of __builtin_va_arg_pack () are removed during
5863 inlining. */
5864 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5865 return const0_rtx;
5866
5867 case BUILT_IN_VA_ARG_PACK_LEN:
5868 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5869 inlining. */
5870 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5871 return const0_rtx;
5872
5873 /* Return the address of the first anonymous stack arg. */
5874 case BUILT_IN_NEXT_ARG:
5875 if (fold_builtin_next_arg (exp, false))
5876 return const0_rtx;
5877 return expand_builtin_next_arg ();
5878
5879 case BUILT_IN_CLEAR_CACHE:
5880 target = expand_builtin___clear_cache (exp);
5881 if (target)
5882 return target;
5883 break;
5884
5885 case BUILT_IN_CLASSIFY_TYPE:
5886 return expand_builtin_classify_type (exp);
5887
5888 case BUILT_IN_CONSTANT_P:
5889 return const0_rtx;
5890
5891 case BUILT_IN_FRAME_ADDRESS:
5892 case BUILT_IN_RETURN_ADDRESS:
5893 return expand_builtin_frame_address (fndecl, exp);
5894
5895 /* Returns the address of the area where the structure is returned.
5896 0 otherwise. */
5897 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5898 if (call_expr_nargs (exp) != 0
5899 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5900 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5901 return const0_rtx;
5902 else
5903 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5904
5905 case BUILT_IN_ALLOCA:
5906 case BUILT_IN_ALLOCA_WITH_ALIGN:
5907 /* If the allocation stems from the declaration of a variable-sized
5908 object, it cannot accumulate. */
5909 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5910 if (target)
5911 return target;
5912 break;
5913
5914 case BUILT_IN_STACK_SAVE:
5915 return expand_stack_save ();
5916
5917 case BUILT_IN_STACK_RESTORE:
5918 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5919 return const0_rtx;
5920
5921 case BUILT_IN_BSWAP16:
5922 case BUILT_IN_BSWAP32:
5923 case BUILT_IN_BSWAP64:
5924 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5925 if (target)
5926 return target;
5927 break;
5928
5929 CASE_INT_FN (BUILT_IN_FFS):
5930 target = expand_builtin_unop (target_mode, exp, target,
5931 subtarget, ffs_optab);
5932 if (target)
5933 return target;
5934 break;
5935
5936 CASE_INT_FN (BUILT_IN_CLZ):
5937 target = expand_builtin_unop (target_mode, exp, target,
5938 subtarget, clz_optab);
5939 if (target)
5940 return target;
5941 break;
5942
5943 CASE_INT_FN (BUILT_IN_CTZ):
5944 target = expand_builtin_unop (target_mode, exp, target,
5945 subtarget, ctz_optab);
5946 if (target)
5947 return target;
5948 break;
5949
5950 CASE_INT_FN (BUILT_IN_CLRSB):
5951 target = expand_builtin_unop (target_mode, exp, target,
5952 subtarget, clrsb_optab);
5953 if (target)
5954 return target;
5955 break;
5956
5957 CASE_INT_FN (BUILT_IN_POPCOUNT):
5958 target = expand_builtin_unop (target_mode, exp, target,
5959 subtarget, popcount_optab);
5960 if (target)
5961 return target;
5962 break;
5963
5964 CASE_INT_FN (BUILT_IN_PARITY):
5965 target = expand_builtin_unop (target_mode, exp, target,
5966 subtarget, parity_optab);
5967 if (target)
5968 return target;
5969 break;
5970
5971 case BUILT_IN_STRLEN:
5972 target = expand_builtin_strlen (exp, target, target_mode);
5973 if (target)
5974 return target;
5975 break;
5976
5977 case BUILT_IN_STRCPY:
5978 target = expand_builtin_strcpy (exp, target);
5979 if (target)
5980 return target;
5981 break;
5982
5983 case BUILT_IN_STRNCPY:
5984 target = expand_builtin_strncpy (exp, target);
5985 if (target)
5986 return target;
5987 break;
5988
5989 case BUILT_IN_STPCPY:
5990 target = expand_builtin_stpcpy (exp, target, mode);
5991 if (target)
5992 return target;
5993 break;
5994
5995 case BUILT_IN_MEMCPY:
5996 target = expand_builtin_memcpy (exp, target);
5997 if (target)
5998 return target;
5999 break;
6000
6001 case BUILT_IN_MEMPCPY:
6002 target = expand_builtin_mempcpy (exp, target, mode);
6003 if (target)
6004 return target;
6005 break;
6006
6007 case BUILT_IN_MEMSET:
6008 target = expand_builtin_memset (exp, target, mode);
6009 if (target)
6010 return target;
6011 break;
6012
6013 case BUILT_IN_BZERO:
6014 target = expand_builtin_bzero (exp);
6015 if (target)
6016 return target;
6017 break;
6018
6019 case BUILT_IN_STRCMP:
6020 target = expand_builtin_strcmp (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 case BUILT_IN_STRNCMP:
6026 target = expand_builtin_strncmp (exp, target, mode);
6027 if (target)
6028 return target;
6029 break;
6030
6031 case BUILT_IN_BCMP:
6032 case BUILT_IN_MEMCMP:
6033 target = expand_builtin_memcmp (exp, target, mode);
6034 if (target)
6035 return target;
6036 break;
6037
6038 case BUILT_IN_SETJMP:
6039 /* This should have been lowered to the builtins below. */
6040 gcc_unreachable ();
6041
6042 case BUILT_IN_SETJMP_SETUP:
6043 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6044 and the receiver label. */
6045 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6046 {
6047 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6048 VOIDmode, EXPAND_NORMAL);
6049 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6050 rtx label_r = label_rtx (label);
6051
6052 /* This is copied from the handling of non-local gotos. */
6053 expand_builtin_setjmp_setup (buf_addr, label_r);
6054 nonlocal_goto_handler_labels
6055 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6056 nonlocal_goto_handler_labels);
6057 /* ??? Do not let expand_label treat us as such since we would
6058 not want to be both on the list of non-local labels and on
6059 the list of forced labels. */
6060 FORCED_LABEL (label) = 0;
6061 return const0_rtx;
6062 }
6063 break;
6064
6065 case BUILT_IN_SETJMP_DISPATCHER:
6066 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6067 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6068 {
6069 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6070 rtx label_r = label_rtx (label);
6071
6072 /* Remove the dispatcher label from the list of non-local labels
6073 since the receiver labels have been added to it above. */
6074 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6075 return const0_rtx;
6076 }
6077 break;
6078
6079 case BUILT_IN_SETJMP_RECEIVER:
6080 /* __builtin_setjmp_receiver is passed the receiver label. */
6081 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6082 {
6083 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6084 rtx label_r = label_rtx (label);
6085
6086 expand_builtin_setjmp_receiver (label_r);
6087 return const0_rtx;
6088 }
6089 break;
6090
6091 /* __builtin_longjmp is passed a pointer to an array of five words.
6092 It's similar to the C library longjmp function but works with
6093 __builtin_setjmp above. */
6094 case BUILT_IN_LONGJMP:
6095 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6096 {
6097 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6098 VOIDmode, EXPAND_NORMAL);
6099 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6100
6101 if (value != const1_rtx)
6102 {
6103 error ("%<__builtin_longjmp%> second argument must be 1");
6104 return const0_rtx;
6105 }
6106
6107 expand_builtin_longjmp (buf_addr, value);
6108 return const0_rtx;
6109 }
6110 break;
6111
6112 case BUILT_IN_NONLOCAL_GOTO:
6113 target = expand_builtin_nonlocal_goto (exp);
6114 if (target)
6115 return target;
6116 break;
6117
6118 /* This updates the setjmp buffer that is its argument with the value
6119 of the current stack pointer. */
6120 case BUILT_IN_UPDATE_SETJMP_BUF:
6121 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6122 {
6123 rtx buf_addr
6124 = expand_normal (CALL_EXPR_ARG (exp, 0));
6125
6126 expand_builtin_update_setjmp_buf (buf_addr);
6127 return const0_rtx;
6128 }
6129 break;
6130
6131 case BUILT_IN_TRAP:
6132 expand_builtin_trap ();
6133 return const0_rtx;
6134
6135 case BUILT_IN_UNREACHABLE:
6136 expand_builtin_unreachable ();
6137 return const0_rtx;
6138
6139 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6140 case BUILT_IN_SIGNBITD32:
6141 case BUILT_IN_SIGNBITD64:
6142 case BUILT_IN_SIGNBITD128:
6143 target = expand_builtin_signbit (exp, target);
6144 if (target)
6145 return target;
6146 break;
6147
6148 /* Various hooks for the DWARF 2 __throw routine. */
6149 case BUILT_IN_UNWIND_INIT:
6150 expand_builtin_unwind_init ();
6151 return const0_rtx;
6152 case BUILT_IN_DWARF_CFA:
6153 return virtual_cfa_rtx;
6154 #ifdef DWARF2_UNWIND_INFO
6155 case BUILT_IN_DWARF_SP_COLUMN:
6156 return expand_builtin_dwarf_sp_column ();
6157 case BUILT_IN_INIT_DWARF_REG_SIZES:
6158 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6159 return const0_rtx;
6160 #endif
6161 case BUILT_IN_FROB_RETURN_ADDR:
6162 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6163 case BUILT_IN_EXTRACT_RETURN_ADDR:
6164 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6165 case BUILT_IN_EH_RETURN:
6166 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6167 CALL_EXPR_ARG (exp, 1));
6168 return const0_rtx;
6169 #ifdef EH_RETURN_DATA_REGNO
6170 case BUILT_IN_EH_RETURN_DATA_REGNO:
6171 return expand_builtin_eh_return_data_regno (exp);
6172 #endif
6173 case BUILT_IN_EXTEND_POINTER:
6174 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6175 case BUILT_IN_EH_POINTER:
6176 return expand_builtin_eh_pointer (exp);
6177 case BUILT_IN_EH_FILTER:
6178 return expand_builtin_eh_filter (exp);
6179 case BUILT_IN_EH_COPY_VALUES:
6180 return expand_builtin_eh_copy_values (exp);
6181
6182 case BUILT_IN_VA_START:
6183 return expand_builtin_va_start (exp);
6184 case BUILT_IN_VA_END:
6185 return expand_builtin_va_end (exp);
6186 case BUILT_IN_VA_COPY:
6187 return expand_builtin_va_copy (exp);
6188 case BUILT_IN_EXPECT:
6189 return expand_builtin_expect (exp, target);
6190 case BUILT_IN_ASSUME_ALIGNED:
6191 return expand_builtin_assume_aligned (exp, target);
6192 case BUILT_IN_PREFETCH:
6193 expand_builtin_prefetch (exp);
6194 return const0_rtx;
6195
6196 case BUILT_IN_INIT_TRAMPOLINE:
6197 return expand_builtin_init_trampoline (exp, true);
6198 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6199 return expand_builtin_init_trampoline (exp, false);
6200 case BUILT_IN_ADJUST_TRAMPOLINE:
6201 return expand_builtin_adjust_trampoline (exp);
6202
6203 case BUILT_IN_FORK:
6204 case BUILT_IN_EXECL:
6205 case BUILT_IN_EXECV:
6206 case BUILT_IN_EXECLP:
6207 case BUILT_IN_EXECLE:
6208 case BUILT_IN_EXECVP:
6209 case BUILT_IN_EXECVE:
6210 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6211 if (target)
6212 return target;
6213 break;
6214
6215 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6216 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6217 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6218 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6219 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6220 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6221 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6222 if (target)
6223 return target;
6224 break;
6225
6226 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6227 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6228 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6229 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6230 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6231 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6232 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6233 if (target)
6234 return target;
6235 break;
6236
6237 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6238 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6239 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6240 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6241 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6242 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6243 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6244 if (target)
6245 return target;
6246 break;
6247
6248 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6249 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6250 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6251 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6252 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6253 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6254 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6260 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6261 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6262 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6263 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6265 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6266 if (target)
6267 return target;
6268 break;
6269
6270 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6271 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6272 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6273 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6274 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6276 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6277 if (target)
6278 return target;
6279 break;
6280
6281 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6282 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6283 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6284 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6285 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6286 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6287 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6288 if (target)
6289 return target;
6290 break;
6291
6292 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6293 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6294 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6295 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6296 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6297 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6298 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6304 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6305 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6306 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6307 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6308 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6309 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6310 if (target)
6311 return target;
6312 break;
6313
6314 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6315 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6316 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6317 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6318 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6319 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6320 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6326 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6327 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6328 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6329 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6330 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6331 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6332 if (target)
6333 return target;
6334 break;
6335
6336 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6337 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6338 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6339 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6340 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6341 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6342 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6343 if (target)
6344 return target;
6345 break;
6346
6347 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6348 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6349 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6350 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6351 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6352 if (mode == VOIDmode)
6353 mode = TYPE_MODE (boolean_type_node);
6354 if (!target || !register_operand (target, mode))
6355 target = gen_reg_rtx (mode);
6356
6357 mode = get_builtin_sync_mode
6358 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6359 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6360 if (target)
6361 return target;
6362 break;
6363
6364 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6365 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6366 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6367 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6368 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6369 mode = get_builtin_sync_mode
6370 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6371 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6372 if (target)
6373 return target;
6374 break;
6375
6376 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6377 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6378 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6379 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6380 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6381 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6382 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6383 if (target)
6384 return target;
6385 break;
6386
6387 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6388 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6389 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6390 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6391 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6392 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6393 expand_builtin_sync_lock_release (mode, exp);
6394 return const0_rtx;
6395
6396 case BUILT_IN_SYNC_SYNCHRONIZE:
6397 expand_builtin_sync_synchronize ();
6398 return const0_rtx;
6399
6400 case BUILT_IN_ATOMIC_EXCHANGE_1:
6401 case BUILT_IN_ATOMIC_EXCHANGE_2:
6402 case BUILT_IN_ATOMIC_EXCHANGE_4:
6403 case BUILT_IN_ATOMIC_EXCHANGE_8:
6404 case BUILT_IN_ATOMIC_EXCHANGE_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6406 target = expand_builtin_atomic_exchange (mode, exp, target);
6407 if (target)
6408 return target;
6409 break;
6410
6411 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6412 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6413 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6414 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6415 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6416 {
6417 unsigned int nargs, z;
6418 vec<tree, va_gc> *vec;
6419
6420 mode =
6421 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6422 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6423 if (target)
6424 return target;
6425
6426 /* If this is turned into an external library call, the weak parameter
6427 must be dropped to match the expected parameter list. */
6428 nargs = call_expr_nargs (exp);
6429 vec_alloc (vec, nargs - 1);
6430 for (z = 0; z < 3; z++)
6431 vec->quick_push (CALL_EXPR_ARG (exp, z));
6432 /* Skip the boolean weak parameter. */
6433 for (z = 4; z < 6; z++)
6434 vec->quick_push (CALL_EXPR_ARG (exp, z));
6435 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6436 break;
6437 }
6438
6439 case BUILT_IN_ATOMIC_LOAD_1:
6440 case BUILT_IN_ATOMIC_LOAD_2:
6441 case BUILT_IN_ATOMIC_LOAD_4:
6442 case BUILT_IN_ATOMIC_LOAD_8:
6443 case BUILT_IN_ATOMIC_LOAD_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6445 target = expand_builtin_atomic_load (mode, exp, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_ATOMIC_STORE_1:
6451 case BUILT_IN_ATOMIC_STORE_2:
6452 case BUILT_IN_ATOMIC_STORE_4:
6453 case BUILT_IN_ATOMIC_STORE_8:
6454 case BUILT_IN_ATOMIC_STORE_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6456 target = expand_builtin_atomic_store (mode, exp);
6457 if (target)
6458 return const0_rtx;
6459 break;
6460
6461 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6462 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6463 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6464 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6465 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6466 {
6467 enum built_in_function lib;
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6469 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6470 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6471 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6472 ignore, lib);
6473 if (target)
6474 return target;
6475 break;
6476 }
6477 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6478 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6479 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6480 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6481 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6482 {
6483 enum built_in_function lib;
6484 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6485 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6486 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6487 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6488 ignore, lib);
6489 if (target)
6490 return target;
6491 break;
6492 }
6493 case BUILT_IN_ATOMIC_AND_FETCH_1:
6494 case BUILT_IN_ATOMIC_AND_FETCH_2:
6495 case BUILT_IN_ATOMIC_AND_FETCH_4:
6496 case BUILT_IN_ATOMIC_AND_FETCH_8:
6497 case BUILT_IN_ATOMIC_AND_FETCH_16:
6498 {
6499 enum built_in_function lib;
6500 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6501 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6502 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6503 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6504 ignore, lib);
6505 if (target)
6506 return target;
6507 break;
6508 }
6509 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6510 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6511 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6512 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6513 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6514 {
6515 enum built_in_function lib;
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6517 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6518 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6519 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6520 ignore, lib);
6521 if (target)
6522 return target;
6523 break;
6524 }
6525 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6526 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6527 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6528 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6529 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6530 {
6531 enum built_in_function lib;
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6533 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6534 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6535 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6536 ignore, lib);
6537 if (target)
6538 return target;
6539 break;
6540 }
6541 case BUILT_IN_ATOMIC_OR_FETCH_1:
6542 case BUILT_IN_ATOMIC_OR_FETCH_2:
6543 case BUILT_IN_ATOMIC_OR_FETCH_4:
6544 case BUILT_IN_ATOMIC_OR_FETCH_8:
6545 case BUILT_IN_ATOMIC_OR_FETCH_16:
6546 {
6547 enum built_in_function lib;
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6549 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6550 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6551 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6552 ignore, lib);
6553 if (target)
6554 return target;
6555 break;
6556 }
6557 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6558 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6559 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6560 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6561 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6562 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6563 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6564 ignore, BUILT_IN_NONE);
6565 if (target)
6566 return target;
6567 break;
6568
6569 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6570 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6571 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6572 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6573 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6574 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6575 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6576 ignore, BUILT_IN_NONE);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_FETCH_AND_1:
6582 case BUILT_IN_ATOMIC_FETCH_AND_2:
6583 case BUILT_IN_ATOMIC_FETCH_AND_4:
6584 case BUILT_IN_ATOMIC_FETCH_AND_8:
6585 case BUILT_IN_ATOMIC_FETCH_AND_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6587 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6588 ignore, BUILT_IN_NONE);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6594 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6595 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6596 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6597 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6599 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6600 ignore, BUILT_IN_NONE);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6606 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6607 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6608 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6609 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6610 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6611 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6612 ignore, BUILT_IN_NONE);
6613 if (target)
6614 return target;
6615 break;
6616
6617 case BUILT_IN_ATOMIC_FETCH_OR_1:
6618 case BUILT_IN_ATOMIC_FETCH_OR_2:
6619 case BUILT_IN_ATOMIC_FETCH_OR_4:
6620 case BUILT_IN_ATOMIC_FETCH_OR_8:
6621 case BUILT_IN_ATOMIC_FETCH_OR_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6624 ignore, BUILT_IN_NONE);
6625 if (target)
6626 return target;
6627 break;
6628
6629 case BUILT_IN_ATOMIC_TEST_AND_SET:
6630 return expand_builtin_atomic_test_and_set (exp, target);
6631
6632 case BUILT_IN_ATOMIC_CLEAR:
6633 return expand_builtin_atomic_clear (exp);
6634
6635 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6636 return expand_builtin_atomic_always_lock_free (exp);
6637
6638 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6639 target = expand_builtin_atomic_is_lock_free (exp);
6640 if (target)
6641 return target;
6642 break;
6643
6644 case BUILT_IN_ATOMIC_THREAD_FENCE:
6645 expand_builtin_atomic_thread_fence (exp);
6646 return const0_rtx;
6647
6648 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6649 expand_builtin_atomic_signal_fence (exp);
6650 return const0_rtx;
6651
6652 case BUILT_IN_OBJECT_SIZE:
6653 return expand_builtin_object_size (exp);
6654
6655 case BUILT_IN_MEMCPY_CHK:
6656 case BUILT_IN_MEMPCPY_CHK:
6657 case BUILT_IN_MEMMOVE_CHK:
6658 case BUILT_IN_MEMSET_CHK:
6659 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6660 if (target)
6661 return target;
6662 break;
6663
6664 case BUILT_IN_STRCPY_CHK:
6665 case BUILT_IN_STPCPY_CHK:
6666 case BUILT_IN_STRNCPY_CHK:
6667 case BUILT_IN_STPNCPY_CHK:
6668 case BUILT_IN_STRCAT_CHK:
6669 case BUILT_IN_STRNCAT_CHK:
6670 case BUILT_IN_SNPRINTF_CHK:
6671 case BUILT_IN_VSNPRINTF_CHK:
6672 maybe_emit_chk_warning (exp, fcode);
6673 break;
6674
6675 case BUILT_IN_SPRINTF_CHK:
6676 case BUILT_IN_VSPRINTF_CHK:
6677 maybe_emit_sprintf_chk_warning (exp, fcode);
6678 break;
6679
6680 case BUILT_IN_FREE:
6681 if (warn_free_nonheap_object)
6682 maybe_emit_free_warning (exp);
6683 break;
6684
6685 case BUILT_IN_THREAD_POINTER:
6686 return expand_builtin_thread_pointer (exp, target);
6687
6688 case BUILT_IN_SET_THREAD_POINTER:
6689 expand_builtin_set_thread_pointer (exp);
6690 return const0_rtx;
6691
6692 default: /* Just do a library call for an unknown builtin. */
6693 break;
6694 }
6695
6696 /* The switch statement above can drop through to cause the function
6697 to be called normally. */
6698 return expand_call (exp, target, ignore);
6699 }
6700
6701 /* Determine whether a tree node represents a call to a built-in
6702 function. If the tree T is a call to a built-in function with
6703 the right number of arguments of the appropriate types, return
6704 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6705 Otherwise the return value is END_BUILTINS. */
6706
6707 enum built_in_function
6708 builtin_mathfn_code (const_tree t)
6709 {
6710 const_tree fndecl, arg, parmlist;
6711 const_tree argtype, parmtype;
6712 const_call_expr_arg_iterator iter;
6713
6714 if (TREE_CODE (t) != CALL_EXPR
6715 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6716 return END_BUILTINS;
6717
6718 fndecl = get_callee_fndecl (t);
6719 if (fndecl == NULL_TREE
6720 || TREE_CODE (fndecl) != FUNCTION_DECL
6721 || ! DECL_BUILT_IN (fndecl)
6722 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6723 return END_BUILTINS;
6724
6725 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6726 init_const_call_expr_arg_iterator (t, &iter);
6727 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6728 {
6729 /* If a function doesn't take a variable number of arguments,
6730 the last element in the list will have type `void'. */
6731 parmtype = TREE_VALUE (parmlist);
6732 if (VOID_TYPE_P (parmtype))
6733 {
6734 if (more_const_call_expr_args_p (&iter))
6735 return END_BUILTINS;
6736 return DECL_FUNCTION_CODE (fndecl);
6737 }
6738
6739 if (! more_const_call_expr_args_p (&iter))
6740 return END_BUILTINS;
6741
6742 arg = next_const_call_expr_arg (&iter);
6743 argtype = TREE_TYPE (arg);
6744
6745 if (SCALAR_FLOAT_TYPE_P (parmtype))
6746 {
6747 if (! SCALAR_FLOAT_TYPE_P (argtype))
6748 return END_BUILTINS;
6749 }
6750 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6751 {
6752 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6753 return END_BUILTINS;
6754 }
6755 else if (POINTER_TYPE_P (parmtype))
6756 {
6757 if (! POINTER_TYPE_P (argtype))
6758 return END_BUILTINS;
6759 }
6760 else if (INTEGRAL_TYPE_P (parmtype))
6761 {
6762 if (! INTEGRAL_TYPE_P (argtype))
6763 return END_BUILTINS;
6764 }
6765 else
6766 return END_BUILTINS;
6767 }
6768
6769 /* Variable-length argument list. */
6770 return DECL_FUNCTION_CODE (fndecl);
6771 }
6772
6773 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6774 evaluate to a constant. */
6775
6776 static tree
6777 fold_builtin_constant_p (tree arg)
6778 {
6779 /* We return 1 for a numeric type that's known to be a constant
6780 value at compile-time or for an aggregate type that's a
6781 literal constant. */
6782 STRIP_NOPS (arg);
6783
6784 /* If we know this is a constant, emit the constant of one. */
6785 if (CONSTANT_CLASS_P (arg)
6786 || (TREE_CODE (arg) == CONSTRUCTOR
6787 && TREE_CONSTANT (arg)))
6788 return integer_one_node;
6789 if (TREE_CODE (arg) == ADDR_EXPR)
6790 {
6791 tree op = TREE_OPERAND (arg, 0);
6792 if (TREE_CODE (op) == STRING_CST
6793 || (TREE_CODE (op) == ARRAY_REF
6794 && integer_zerop (TREE_OPERAND (op, 1))
6795 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6796 return integer_one_node;
6797 }
6798
6799 /* If this expression has side effects, show we don't know it to be a
6800 constant. Likewise if it's a pointer or aggregate type, since in
6801 those cases we only want literals, as those are only optimized
6802 when generating RTL, not later.
6803 And finally, if we are compiling an initializer, not code, we
6804 need to return a definite result now; there's not going to be any
6805 more optimization done. */
6806 if (TREE_SIDE_EFFECTS (arg)
6807 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6808 || POINTER_TYPE_P (TREE_TYPE (arg))
6809 || cfun == 0
6810 || folding_initializer
6811 || force_folding_builtin_constant_p)
6812 return integer_zero_node;
6813
6814 return NULL_TREE;
6815 }
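
/* Illustration (editorial note): under the rules above,
   __builtin_constant_p (3 * 4) and __builtin_constant_p ("abc") fold to
   1 right away, a pointer- or aggregate-typed argument that is not a
   literal folds to 0, and an integer variable folds to 0 only once no
   more optimization will run (e.g. inside an initializer); until then
   returning NULL_TREE leaves the call in place for later passes.  */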
6816
6817 /* Create a call to builtin_expect with PRED and EXPECTED as its
6818 arguments and return the call as a truthvalue. */
6819
6820 static tree
6821 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6822 {
6823 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6824
6825 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6826 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6827 ret_type = TREE_TYPE (TREE_TYPE (fn));
6828 pred_type = TREE_VALUE (arg_types);
6829 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6830
6831 pred = fold_convert_loc (loc, pred_type, pred);
6832 expected = fold_convert_loc (loc, expected_type, expected);
6833 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6834
6835 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6836 build_int_cst (ret_type, 0));
6837 }
6838
6839 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6840 NULL_TREE if no simplification is possible. */
6841
6842 static tree
6843 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6844 {
6845 tree inner, fndecl, inner_arg0;
6846 enum tree_code code;
6847
6848 /* Distribute the expected value over short-circuiting operators.
6849 See through the cast from truthvalue_type_node to long. */
6850 inner_arg0 = arg0;
6851 while (TREE_CODE (inner_arg0) == NOP_EXPR
6852 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6853 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6854 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6855
6856 /* If this is a builtin_expect within a builtin_expect keep the
6857 inner one. See through a comparison against a constant. It
6858 might have been added to create a truthvalue. */
6859 inner = inner_arg0;
6860
6861 if (COMPARISON_CLASS_P (inner)
6862 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6863 inner = TREE_OPERAND (inner, 0);
6864
6865 if (TREE_CODE (inner) == CALL_EXPR
6866 && (fndecl = get_callee_fndecl (inner))
6867 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6868 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6869 return arg0;
6870
6871 inner = inner_arg0;
6872 code = TREE_CODE (inner);
6873 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6874 {
6875 tree op0 = TREE_OPERAND (inner, 0);
6876 tree op1 = TREE_OPERAND (inner, 1);
6877
6878 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6879 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6880 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6881
6882 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6883 }
6884
6885 /* If the argument isn't invariant then there's nothing else we can do. */
6886 if (!TREE_CONSTANT (inner_arg0))
6887 return NULL_TREE;
6888
6889 /* If we expect that a comparison against the argument will fold to
6890 a constant return the constant. In practice, this means a true
6891 constant or the address of a non-weak symbol. */
6892 inner = inner_arg0;
6893 STRIP_NOPS (inner);
6894 if (TREE_CODE (inner) == ADDR_EXPR)
6895 {
6896 do
6897 {
6898 inner = TREE_OPERAND (inner, 0);
6899 }
6900 while (TREE_CODE (inner) == COMPONENT_REF
6901 || TREE_CODE (inner) == ARRAY_REF);
6902 if ((TREE_CODE (inner) == VAR_DECL
6903 || TREE_CODE (inner) == FUNCTION_DECL)
6904 && DECL_WEAK (inner))
6905 return NULL_TREE;
6906 }
6907
6908 /* Otherwise, ARG0 already has the proper type for the return value. */
6909 return arg0;
6910 }
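
/* Illustration (editorial note): the distribution step above rewrites,
   at the tree level,

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the type of the original argument, so the expected
   value reaches both branch conditions.  */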
6911
6912 /* Fold a call to __builtin_classify_type with argument ARG. */
6913
6914 static tree
6915 fold_builtin_classify_type (tree arg)
6916 {
6917 if (arg == 0)
6918 return build_int_cst (integer_type_node, no_type_class);
6919
6920 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6921 }
6922
6923 /* Fold a call to __builtin_strlen with argument ARG. */
6924
6925 static tree
6926 fold_builtin_strlen (location_t loc, tree type, tree arg)
6927 {
6928 if (!validate_arg (arg, POINTER_TYPE))
6929 return NULL_TREE;
6930 else
6931 {
6932 tree len = c_strlen (arg, 0);
6933
6934 if (len)
6935 return fold_convert_loc (loc, type, len);
6936
6937 return NULL_TREE;
6938 }
6939 }
6940
6941 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6942
6943 static tree
6944 fold_builtin_inf (location_t loc, tree type, int warn)
6945 {
6946 REAL_VALUE_TYPE real;
6947
6948 /* __builtin_inff is intended to be usable to define INFINITY on all
6949 targets. If an infinity is not available, INFINITY expands "to a
6950 positive constant of type float that overflows at translation
6951 time", footnote "In this case, using INFINITY will violate the
6952 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6953 Thus we pedwarn to ensure this constraint violation is
6954 diagnosed. */
6955 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6956 pedwarn (loc, 0, "target format does not support infinity");
6957
6958 real_inf (&real);
6959 return build_real (type, real);
6960 }
6961
6962 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6963
6964 static tree
6965 fold_builtin_nan (tree arg, tree type, int quiet)
6966 {
6967 REAL_VALUE_TYPE real;
6968 const char *str;
6969
6970 if (!validate_arg (arg, POINTER_TYPE))
6971 return NULL_TREE;
6972 str = c_getstr (arg);
6973 if (!str)
6974 return NULL_TREE;
6975
6976 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6977 return NULL_TREE;
6978
6979 return build_real (type, real);
6980 }
6981
6982 /* Return true if the floating point expression T has an integer value.
6983 We also allow +Inf, -Inf and NaN to be considered integer values. */
6984
6985 static bool
6986 integer_valued_real_p (tree t)
6987 {
6988 switch (TREE_CODE (t))
6989 {
6990 case FLOAT_EXPR:
6991 return true;
6992
6993 case ABS_EXPR:
6994 case SAVE_EXPR:
6995 return integer_valued_real_p (TREE_OPERAND (t, 0));
6996
6997 case COMPOUND_EXPR:
6998 case MODIFY_EXPR:
6999 case BIND_EXPR:
7000 return integer_valued_real_p (TREE_OPERAND (t, 1));
7001
7002 case PLUS_EXPR:
7003 case MINUS_EXPR:
7004 case MULT_EXPR:
7005 case MIN_EXPR:
7006 case MAX_EXPR:
7007 return integer_valued_real_p (TREE_OPERAND (t, 0))
7008 && integer_valued_real_p (TREE_OPERAND (t, 1));
7009
7010 case COND_EXPR:
7011 return integer_valued_real_p (TREE_OPERAND (t, 1))
7012 && integer_valued_real_p (TREE_OPERAND (t, 2));
7013
7014 case REAL_CST:
7015 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7016
7017 case NOP_EXPR:
7018 {
7019 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7020 if (TREE_CODE (type) == INTEGER_TYPE)
7021 return true;
7022 if (TREE_CODE (type) == REAL_TYPE)
7023 return integer_valued_real_p (TREE_OPERAND (t, 0));
7024 break;
7025 }
7026
7027 case CALL_EXPR:
7028 switch (builtin_mathfn_code (t))
7029 {
7030 CASE_FLT_FN (BUILT_IN_CEIL):
7031 CASE_FLT_FN (BUILT_IN_FLOOR):
7032 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7033 CASE_FLT_FN (BUILT_IN_RINT):
7034 CASE_FLT_FN (BUILT_IN_ROUND):
7035 CASE_FLT_FN (BUILT_IN_TRUNC):
7036 return true;
7037
7038 CASE_FLT_FN (BUILT_IN_FMIN):
7039 CASE_FLT_FN (BUILT_IN_FMAX):
7040 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7041 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7042
7043 default:
7044 break;
7045 }
7046 break;
7047
7048 default:
7049 break;
7050 }
7051 return false;
7052 }
7053
7054 /* FNDECL is assumed to be a builtin where truncation can be propagated
7055 across (for instance floor((double)f) == (double)floorf (f)).
7056 Do the transformation for a call with argument ARG. */
7057
7058 static tree
7059 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7060 {
7061 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7062
7063 if (!validate_arg (arg, REAL_TYPE))
7064 return NULL_TREE;
7065
7066 /* Integer rounding functions are idempotent. */
7067 if (fcode == builtin_mathfn_code (arg))
7068 return arg;
7069
7070 /* If the argument is already integer valued, and we don't need to worry
7071 about setting errno, there's no need to perform rounding. */
7072 if (! flag_errno_math && integer_valued_real_p (arg))
7073 return arg;
7074
7075 if (optimize)
7076 {
7077 tree arg0 = strip_float_extensions (arg);
7078 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7079 tree newtype = TREE_TYPE (arg0);
7080 tree decl;
7081
7082 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7083 && (decl = mathfn_built_in (newtype, fcode)))
7084 return fold_convert_loc (loc, ftype,
7085 build_call_expr_loc (loc, decl, 1,
7086 fold_convert_loc (loc,
7087 newtype,
7088 arg0)));
7089 }
7090 return NULL_TREE;
7091 }
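
/* Illustration (editorial note): given FLOAT f and an available floorf,
   the narrowing above rewrites floor ((double) f), with optimization
   enabled, as (double) floorf (f); truncation commutes with the widening
   cast, so the result is unchanged.  */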
7092
7093 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7094 the argument, for instance lround((double)f) -> lroundf (f).
7095 Do the transformation for a call with argument ARG. */
7096
7097 static tree
7098 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7099 {
7100 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7101
7102 if (!validate_arg (arg, REAL_TYPE))
7103 return NULL_TREE;
7104
7105 /* If the argument is already integer valued, and we don't need to worry
7106 about setting errno, there's no need to perform rounding. */
7107 if (! flag_errno_math && integer_valued_real_p (arg))
7108 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7109 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7110
7111 if (optimize)
7112 {
7113 tree ftype = TREE_TYPE (arg);
7114 tree arg0 = strip_float_extensions (arg);
7115 tree newtype = TREE_TYPE (arg0);
7116 tree decl;
7117
7118 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7119 && (decl = mathfn_built_in (newtype, fcode)))
7120 return build_call_expr_loc (loc, decl, 1,
7121 fold_convert_loc (loc, newtype, arg0));
7122 }
7123
7124 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7125 sizeof (int) == sizeof (long). */
7126 if (TYPE_PRECISION (integer_type_node)
7127 == TYPE_PRECISION (long_integer_type_node))
7128 {
7129 tree newfn = NULL_TREE;
7130 switch (fcode)
7131 {
7132 CASE_FLT_FN (BUILT_IN_ICEIL):
7133 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7134 break;
7135
7136 CASE_FLT_FN (BUILT_IN_IFLOOR):
7137 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7138 break;
7139
7140 CASE_FLT_FN (BUILT_IN_IROUND):
7141 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7142 break;
7143
7144 CASE_FLT_FN (BUILT_IN_IRINT):
7145 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7146 break;
7147
7148 default:
7149 break;
7150 }
7151
7152 if (newfn)
7153 {
7154 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7155 return fold_convert_loc (loc,
7156 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7157 }
7158 }
7159
7160 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7161 sizeof (long long) == sizeof (long). */
7162 if (TYPE_PRECISION (long_long_integer_type_node)
7163 == TYPE_PRECISION (long_integer_type_node))
7164 {
7165 tree newfn = NULL_TREE;
7166 switch (fcode)
7167 {
7168 CASE_FLT_FN (BUILT_IN_LLCEIL):
7169 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7170 break;
7171
7172 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7173 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7174 break;
7175
7176 CASE_FLT_FN (BUILT_IN_LLROUND):
7177 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7178 break;
7179
7180 CASE_FLT_FN (BUILT_IN_LLRINT):
7181 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7182 break;
7183
7184 default:
7185 break;
7186 }
7187
7188 if (newfn)
7189 {
7190 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7191 return fold_convert_loc (loc,
7192 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7193 }
7194 }
7195
7196 return NULL_TREE;
7197 }
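
/* Illustration (editorial note): for FLOAT f the narrowing above turns
   lround ((double) f) into lroundf (f); the canonicalizations then map,
   e.g., llround (x) to (long long) lround (x) when long and long long
   have the same precision, so later passes see fewer variants.  */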
7198
7199 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7200 return type. Return NULL_TREE if no simplification can be made. */
7201
7202 static tree
7203 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7204 {
7205 tree res;
7206
7207 if (!validate_arg (arg, COMPLEX_TYPE)
7208 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7209 return NULL_TREE;
7210
7211 /* Calculate the result when the argument is a constant. */
7212 if (TREE_CODE (arg) == COMPLEX_CST
7213 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7214 type, mpfr_hypot)))
7215 return res;
7216
7217 if (TREE_CODE (arg) == COMPLEX_EXPR)
7218 {
7219 tree real = TREE_OPERAND (arg, 0);
7220 tree imag = TREE_OPERAND (arg, 1);
7221
7222 /* If either part is zero, cabs is fabs of the other. */
7223 if (real_zerop (real))
7224 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7225 if (real_zerop (imag))
7226 return fold_build1_loc (loc, ABS_EXPR, type, real);
7227
7228 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7229 if (flag_unsafe_math_optimizations
7230 && operand_equal_p (real, imag, OEP_PURE_SAME))
7231 {
7232 const REAL_VALUE_TYPE sqrt2_trunc
7233 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7234 STRIP_NOPS (real);
7235 return fold_build2_loc (loc, MULT_EXPR, type,
7236 fold_build1_loc (loc, ABS_EXPR, type, real),
7237 build_real (type, sqrt2_trunc));
7238 }
7239 }
7240
7241 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7242 if (TREE_CODE (arg) == NEGATE_EXPR
7243 || TREE_CODE (arg) == CONJ_EXPR)
7244 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7245
7246 /* Don't do this when optimizing for size. */
7247 if (flag_unsafe_math_optimizations
7248 && optimize && optimize_function_for_speed_p (cfun))
7249 {
7250 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7251
7252 if (sqrtfn != NULL_TREE)
7253 {
7254 tree rpart, ipart, result;
7255
7256 arg = builtin_save_expr (arg);
7257
7258 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7259 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7260
7261 rpart = builtin_save_expr (rpart);
7262 ipart = builtin_save_expr (ipart);
7263
7264 result = fold_build2_loc (loc, PLUS_EXPR, type,
7265 fold_build2_loc (loc, MULT_EXPR, type,
7266 rpart, rpart),
7267 fold_build2_loc (loc, MULT_EXPR, type,
7268 ipart, ipart));
7269
7270 return build_call_expr_loc (loc, sqrtfn, 1, result);
7271 }
7272 }
7273
7274 return NULL_TREE;
7275 }
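
/* Illustration (editorial note): the transformations above give, for
   example,

     cabs (x + 0.0i) -> fabs (x)
     cabs (x + xi)   -> fabs (x) * sqrt (2)    (unsafe-math only)

   and, with unsafe math when optimizing for speed, cabs (z) expands to
   sqrt (r*r + i*i) on the real and imaginary parts of z.  */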
7276
7277 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7278 complex tree type of the result. If NEG is true, the imaginary
7279 zero is negative. */
7280
7281 static tree
7282 build_complex_cproj (tree type, bool neg)
7283 {
7284 REAL_VALUE_TYPE rinf, rzero = dconst0;
7285
7286 real_inf (&rinf);
7287 rzero.sign = neg;
7288 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7289 build_real (TREE_TYPE (type), rzero));
7290 }
7291
7292 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7293 return type. Return NULL_TREE if no simplification can be made. */
7294
7295 static tree
7296 fold_builtin_cproj (location_t loc, tree arg, tree type)
7297 {
7298 if (!validate_arg (arg, COMPLEX_TYPE)
7299 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7300 return NULL_TREE;
7301
7302 /* If there are no infinities, return arg. */
7303 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7304 return non_lvalue_loc (loc, arg);
7305
7306 /* Calculate the result when the argument is a constant. */
7307 if (TREE_CODE (arg) == COMPLEX_CST)
7308 {
7309 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7310 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7311
7312 if (real_isinf (real) || real_isinf (imag))
7313 return build_complex_cproj (type, imag->sign);
7314 else
7315 return arg;
7316 }
7317 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7318 {
7319 tree real = TREE_OPERAND (arg, 0);
7320 tree imag = TREE_OPERAND (arg, 1);
7321
7322 STRIP_NOPS (real);
7323 STRIP_NOPS (imag);
7324
7325 /* If the real part is inf and the imag part is known to be
7326 nonnegative, return (inf + 0i). Remember side-effects are
7327 possible in the imag part. */
7328 if (TREE_CODE (real) == REAL_CST
7329 && real_isinf (TREE_REAL_CST_PTR (real))
7330 && tree_expr_nonnegative_p (imag))
7331 return omit_one_operand_loc (loc, type,
7332 build_complex_cproj (type, false),
7333 arg);
7334
7335 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7336 Remember side-effects are possible in the real part. */
7337 if (TREE_CODE (imag) == REAL_CST
7338 && real_isinf (TREE_REAL_CST_PTR (imag)))
7339 return
7340 omit_one_operand_loc (loc, type,
7341 build_complex_cproj (type, TREE_REAL_CST_PTR
7342 (imag)->sign), arg);
7343 }
7344
7345 return NULL_TREE;
7346 }
7347
7348 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7349 Return NULL_TREE if no simplification can be made. */
7350
7351 static tree
7352 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7353 {
7354
7355 enum built_in_function fcode;
7356 tree res;
7357
7358 if (!validate_arg (arg, REAL_TYPE))
7359 return NULL_TREE;
7360
7361 /* Calculate the result when the argument is a constant. */
7362 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7363 return res;
7364
7365 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7366 fcode = builtin_mathfn_code (arg);
7367 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7368 {
7369 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7370 arg = fold_build2_loc (loc, MULT_EXPR, type,
7371 CALL_EXPR_ARG (arg, 0),
7372 build_real (type, dconsthalf));
7373 return build_call_expr_loc (loc, expfn, 1, arg);
7374 }
7375
7376 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7377 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7378 {
7379 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7380
7381 if (powfn)
7382 {
7383 tree arg0 = CALL_EXPR_ARG (arg, 0);
7384 tree tree_root;
7385 /* The inner root was either sqrt or cbrt. */
7386 /* This was a conditional expression but it triggered a bug
7387 in Sun C 5.5. */
7388 REAL_VALUE_TYPE dconstroot;
7389 if (BUILTIN_SQRT_P (fcode))
7390 dconstroot = dconsthalf;
7391 else
7392 dconstroot = dconst_third ();
7393
7394 /* Adjust for the outer root. */
7395 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7396 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7397 tree_root = build_real (type, dconstroot);
7398 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7399 }
7400 }
7401
7402 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7403 if (flag_unsafe_math_optimizations
7404 && (fcode == BUILT_IN_POW
7405 || fcode == BUILT_IN_POWF
7406 || fcode == BUILT_IN_POWL))
7407 {
7408 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7409 tree arg0 = CALL_EXPR_ARG (arg, 0);
7410 tree arg1 = CALL_EXPR_ARG (arg, 1);
7411 tree narg1;
7412 if (!tree_expr_nonnegative_p (arg0))
7413 arg0 = build1 (ABS_EXPR, type, arg0);
7414 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7415 build_real (type, dconsthalf));
7416 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7417 }
7418
7419 return NULL_TREE;
7420 }
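
/* Illustration (editorial note): under -funsafe-math-optimizations the
   folds above produce, for instance,

     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (sqrt (x))   -> pow (x, 0.25)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   The fabs keeps pow's domain valid when x may be negative.  */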
7421
7422 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7423 Return NULL_TREE if no simplification can be made. */
7424
7425 static tree
7426 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7427 {
7428 const enum built_in_function fcode = builtin_mathfn_code (arg);
7429 tree res;
7430
7431 if (!validate_arg (arg, REAL_TYPE))
7432 return NULL_TREE;
7433
7434 /* Calculate the result when the argument is a constant. */
7435 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7436 return res;
7437
7438 if (flag_unsafe_math_optimizations)
7439 {
7440 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7441 if (BUILTIN_EXPONENT_P (fcode))
7442 {
7443 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7444 const REAL_VALUE_TYPE third_trunc =
7445 real_value_truncate (TYPE_MODE (type), dconst_third ());
7446 arg = fold_build2_loc (loc, MULT_EXPR, type,
7447 CALL_EXPR_ARG (arg, 0),
7448 build_real (type, third_trunc));
7449 return build_call_expr_loc (loc, expfn, 1, arg);
7450 }
7451
7452 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7453 if (BUILTIN_SQRT_P (fcode))
7454 {
7455 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7456
7457 if (powfn)
7458 {
7459 tree arg0 = CALL_EXPR_ARG (arg, 0);
7460 tree tree_root;
7461 REAL_VALUE_TYPE dconstroot = dconst_third ();
7462
7463 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7464 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7465 tree_root = build_real (type, dconstroot);
7466 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7467 }
7468 }
7469
7470 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7471 if (BUILTIN_CBRT_P (fcode))
7472 {
7473 tree arg0 = CALL_EXPR_ARG (arg, 0);
7474 if (tree_expr_nonnegative_p (arg0))
7475 {
7476 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7477
7478 if (powfn)
7479 {
7480 tree tree_root;
7481 REAL_VALUE_TYPE dconstroot;
7482
7483 real_arithmetic (&dconstroot, MULT_EXPR,
7484 dconst_third_ptr (), dconst_third_ptr ());
7485 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7486 tree_root = build_real (type, dconstroot);
7487 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7488 }
7489 }
7490 }
7491
7492 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7493 if (fcode == BUILT_IN_POW
7494 || fcode == BUILT_IN_POWF
7495 || fcode == BUILT_IN_POWL)
7496 {
7497 tree arg00 = CALL_EXPR_ARG (arg, 0);
7498 tree arg01 = CALL_EXPR_ARG (arg, 1);
7499 if (tree_expr_nonnegative_p (arg00))
7500 {
7501 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7502 const REAL_VALUE_TYPE dconstroot
7503 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7504 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7505 build_real (type, dconstroot));
7506 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7507 }
7508 }
7509 }
7510 return NULL_TREE;
7511 }
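
/* Illustration (editorial note): the unsafe-math folds above give

     cbrt (exp (x))    -> exp (x / 3)
     cbrt (sqrt (x))   -> pow (x, 1.0/6.0)
     cbrt (cbrt (x))   -> pow (x, 1.0/9.0)   (iff x is nonnegative)
     cbrt (pow (x, y)) -> pow (x, y / 3)     (iff x is nonnegative)  */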
7512
7513 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7514 TYPE is the type of the return value. Return NULL_TREE if no
7515 simplification can be made. */
7516
7517 static tree
7518 fold_builtin_cos (location_t loc,
7519 tree arg, tree type, tree fndecl)
7520 {
7521 tree res, narg;
7522
7523 if (!validate_arg (arg, REAL_TYPE))
7524 return NULL_TREE;
7525
7526 /* Calculate the result when the argument is a constant. */
7527 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7528 return res;
7529
7530 /* Optimize cos(-x) into cos (x). */
7531 if ((narg = fold_strip_sign_ops (arg)))
7532 return build_call_expr_loc (loc, fndecl, 1, narg);
7533
7534 return NULL_TREE;
7535 }
7536
7537 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7538 Return NULL_TREE if no simplification can be made. */
7539
7540 static tree
7541 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7542 {
7543 if (validate_arg (arg, REAL_TYPE))
7544 {
7545 tree res, narg;
7546
7547 /* Calculate the result when the argument is a constant. */
7548 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7549 return res;
7550
7551 /* Optimize cosh(-x) into cosh (x). */
7552 if ((narg = fold_strip_sign_ops (arg)))
7553 return build_call_expr_loc (loc, fndecl, 1, narg);
7554 }
7555
7556 return NULL_TREE;
7557 }
7558
7559 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7560 argument ARG. TYPE is the type of the return value. Return
7561 NULL_TREE if no simplification can be made. */
7562
7563 static tree
7564 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7565 bool hyper)
7566 {
7567 if (validate_arg (arg, COMPLEX_TYPE)
7568 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7569 {
7570 tree tmp;
7571
7572 /* Calculate the result when the argument is a constant. */
7573 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7574 return tmp;
7575
7576 /* Optimize fn(-x) into fn(x). */
7577 if ((tmp = fold_strip_sign_ops (arg)))
7578 return build_call_expr_loc (loc, fndecl, 1, tmp);
7579 }
7580
7581 return NULL_TREE;
7582 }
7583
7584 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7585 Return NULL_TREE if no simplification can be made. */
7586
7587 static tree
7588 fold_builtin_tan (tree arg, tree type)
7589 {
7590 enum built_in_function fcode;
7591 tree res;
7592
7593 if (!validate_arg (arg, REAL_TYPE))
7594 return NULL_TREE;
7595
7596 /* Calculate the result when the argument is a constant. */
7597 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7598 return res;
7599
7600 /* Optimize tan(atan(x)) = x. */
7601 fcode = builtin_mathfn_code (arg);
7602 if (flag_unsafe_math_optimizations
7603 && (fcode == BUILT_IN_ATAN
7604 || fcode == BUILT_IN_ATANF
7605 || fcode == BUILT_IN_ATANL))
7606 return CALL_EXPR_ARG (arg, 0);
7607
7608 return NULL_TREE;
7609 }
7610
7611 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7612 NULL_TREE if no simplification can be made. */
7613
7614 static tree
7615 fold_builtin_sincos (location_t loc,
7616 tree arg0, tree arg1, tree arg2)
7617 {
7618 tree type;
7619 tree res, fn, call;
7620
7621 if (!validate_arg (arg0, REAL_TYPE)
7622 || !validate_arg (arg1, POINTER_TYPE)
7623 || !validate_arg (arg2, POINTER_TYPE))
7624 return NULL_TREE;
7625
7626 type = TREE_TYPE (arg0);
7627
7628 /* Calculate the result when the argument is a constant. */
7629 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7630 return res;
7631
7632 /* Canonicalize sincos to cexpi. */
7633 if (!targetm.libc_has_function (function_c99_math_complex))
7634 return NULL_TREE;
7635 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7636 if (!fn)
7637 return NULL_TREE;
7638
7639 call = build_call_expr_loc (loc, fn, 1, arg0);
7640 call = builtin_save_expr (call);
7641
7642 return build2 (COMPOUND_EXPR, void_type_node,
7643 build2 (MODIFY_EXPR, void_type_node,
7644 build_fold_indirect_ref_loc (loc, arg1),
7645 build1 (IMAGPART_EXPR, type, call)),
7646 build2 (MODIFY_EXPR, void_type_node,
7647 build_fold_indirect_ref_loc (loc, arg2),
7648 build1 (REALPART_EXPR, type, call)));
7649 }
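
/* Illustration (editorial note): when the C99 complex libm is available
   the rewrite above turns sincos (x, &s, &c) into roughly

     tmp = cexpi (x);  s = cimag (tmp);  c = creal (tmp);

   since cexpi (x) computes cos (x) + sin (x) * I in one call.  */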
7650
7651 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7652 NULL_TREE if no simplification can be made. */
7653
7654 static tree
7655 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7656 {
7657 tree rtype;
7658 tree realp, imagp, ifn;
7659 tree res;
7660
7661 if (!validate_arg (arg0, COMPLEX_TYPE)
7662 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7663 return NULL_TREE;
7664
7665 /* Calculate the result when the argument is a constant. */
7666 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7667 return res;
7668
7669 rtype = TREE_TYPE (TREE_TYPE (arg0));
7670
7671 /* In case we can figure out the real part of arg0 and it is constant
7672 zero, fold to cexpi. */
7673 if (!targetm.libc_has_function (function_c99_math_complex))
7674 return NULL_TREE;
7675 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7676 if (!ifn)
7677 return NULL_TREE;
7678
7679 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7680 && real_zerop (realp))
7681 {
7682 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7683 return build_call_expr_loc (loc, ifn, 1, narg);
7684 }
7685
7686 /* In case we can easily decompose real and imaginary parts, split cexp
7687 to exp (r) * cexpi (i). */
7688 if (flag_unsafe_math_optimizations
7689 && realp)
7690 {
7691 tree rfn, rcall, icall;
7692
7693 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7694 if (!rfn)
7695 return NULL_TREE;
7696
7697 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7698 if (!imagp)
7699 return NULL_TREE;
7700
7701 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7702 icall = builtin_save_expr (icall);
7703 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7704 rcall = builtin_save_expr (rcall);
7705 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7706 fold_build2_loc (loc, MULT_EXPR, rtype,
7707 rcall,
7708 fold_build1_loc (loc, REALPART_EXPR,
7709 rtype, icall)),
7710 fold_build2_loc (loc, MULT_EXPR, rtype,
7711 rcall,
7712 fold_build1_loc (loc, IMAGPART_EXPR,
7713 rtype, icall)));
7714 }
7715
7716 return NULL_TREE;
7717 }
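
/* Illustration (editorial note): the two cases above fold

     cexp (0.0 + y*I) -> cexpi (y)

   and, under -funsafe-math-optimizations when the real part can be
   decomposed at compile time,

     cexp (x + y*I) -> exp (x) * creal (cexpi (y))
                       + exp (x) * cimag (cexpi (y)) * I.  */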
7718
7719 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7720 Return NULL_TREE if no simplification can be made. */
7721
7722 static tree
7723 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7724 {
7725 if (!validate_arg (arg, REAL_TYPE))
7726 return NULL_TREE;
7727
7728 /* Optimize trunc of constant value. */
7729 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7730 {
7731 REAL_VALUE_TYPE r, x;
7732 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7733
7734 x = TREE_REAL_CST (arg);
7735 real_trunc (&r, TYPE_MODE (type), &x);
7736 return build_real (type, r);
7737 }
7738
7739 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7740 }
7741
7742 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7743 Return NULL_TREE if no simplification can be made. */
7744
7745 static tree
7746 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7747 {
7748 if (!validate_arg (arg, REAL_TYPE))
7749 return NULL_TREE;
7750
7751 /* Optimize floor of constant value. */
7752 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7753 {
7754 REAL_VALUE_TYPE x;
7755
7756 x = TREE_REAL_CST (arg);
7757 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7758 {
7759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7760 REAL_VALUE_TYPE r;
7761
7762 real_floor (&r, TYPE_MODE (type), &x);
7763 return build_real (type, r);
7764 }
7765 }
7766
7767 /* Fold floor (x) where x is nonnegative to trunc (x). */
7768 if (tree_expr_nonnegative_p (arg))
7769 {
7770 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7771 if (truncfn)
7772 return build_call_expr_loc (loc, truncfn, 1, arg);
7773 }
7774
7775 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7776 }
7777
7778 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7779 Return NULL_TREE if no simplification can be made. */
7780
7781 static tree
7782 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7783 {
7784 if (!validate_arg (arg, REAL_TYPE))
7785 return NULL_TREE;
7786
7787 /* Optimize ceil of constant value. */
7788 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7789 {
7790 REAL_VALUE_TYPE x;
7791
7792 x = TREE_REAL_CST (arg);
7793 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7794 {
7795 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7796 REAL_VALUE_TYPE r;
7797
7798 real_ceil (&r, TYPE_MODE (type), &x);
7799 return build_real (type, r);
7800 }
7801 }
7802
7803 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7804 }
7805
7806 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7807 Return NULL_TREE if no simplification can be made. */
7808
7809 static tree
7810 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7811 {
7812 if (!validate_arg (arg, REAL_TYPE))
7813 return NULL_TREE;
7814
7815 /* Optimize round of constant value. */
7816 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7817 {
7818 REAL_VALUE_TYPE x;
7819
7820 x = TREE_REAL_CST (arg);
7821 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7822 {
7823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7824 REAL_VALUE_TYPE r;
7825
7826 real_round (&r, TYPE_MODE (type), &x);
7827 return build_real (type, r);
7828 }
7829 }
7830
7831 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7832 }
7833
7834 /* Fold function call to builtin lround, lroundf or lroundl (or the
7835 corresponding long long versions) and other rounding functions. ARG
7836 is the argument to the call. Return NULL_TREE if no simplification
7837 can be made. */
7838
7839 static tree
7840 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7841 {
7842 if (!validate_arg (arg, REAL_TYPE))
7843 return NULL_TREE;
7844
7845 /* Optimize lround of constant value. */
7846 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7847 {
7848 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7849
7850 if (real_isfinite (&x))
7851 {
7852 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7853 tree ftype = TREE_TYPE (arg);
7854 double_int val;
7855 REAL_VALUE_TYPE r;
7856
7857 switch (DECL_FUNCTION_CODE (fndecl))
7858 {
7859 CASE_FLT_FN (BUILT_IN_IFLOOR):
7860 CASE_FLT_FN (BUILT_IN_LFLOOR):
7861 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7862 real_floor (&r, TYPE_MODE (ftype), &x);
7863 break;
7864
7865 CASE_FLT_FN (BUILT_IN_ICEIL):
7866 CASE_FLT_FN (BUILT_IN_LCEIL):
7867 CASE_FLT_FN (BUILT_IN_LLCEIL):
7868 real_ceil (&r, TYPE_MODE (ftype), &x);
7869 break;
7870
7871 CASE_FLT_FN (BUILT_IN_IROUND):
7872 CASE_FLT_FN (BUILT_IN_LROUND):
7873 CASE_FLT_FN (BUILT_IN_LLROUND):
7874 real_round (&r, TYPE_MODE (ftype), &x);
7875 break;
7876
7877 default:
7878 gcc_unreachable ();
7879 }
7880
7881 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7882 if (double_int_fits_to_tree_p (itype, val))
7883 return double_int_to_tree (itype, val);
7884 }
7885 }
7886
7887 switch (DECL_FUNCTION_CODE (fndecl))
7888 {
7889 CASE_FLT_FN (BUILT_IN_LFLOOR):
7890 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7891 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7892 if (tree_expr_nonnegative_p (arg))
7893 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7894 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7895 break;
7896 default:;
7897 }
7898
7899 return fold_fixed_mathfn (loc, fndecl, arg);
7900 }
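
/* Illustration (editorial note): with a constant argument the code above
   folds, e.g., lround (2.5) to 3L and lfloor (-0.5) to -1L at compile
   time, provided the value fits the integer result type; lfloor of a
   provably nonnegative x reduces to a plain truncating conversion.  */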
7901
7902 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7903 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7904 the argument to the call. Return NULL_TREE if no simplification can
7905 be made. */
7906
7907 static tree
7908 fold_builtin_bitop (tree fndecl, tree arg)
7909 {
7910 if (!validate_arg (arg, INTEGER_TYPE))
7911 return NULL_TREE;
7912
7913 /* Optimize for constant argument. */
7914 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7915 {
7916 HOST_WIDE_INT hi, width, result;
7917 unsigned HOST_WIDE_INT lo;
7918 tree type;
7919
7920 type = TREE_TYPE (arg);
7921 width = TYPE_PRECISION (type);
7922 lo = TREE_INT_CST_LOW (arg);
7923
7924 /* Clear all the bits that are beyond the type's precision. */
7925 if (width > HOST_BITS_PER_WIDE_INT)
7926 {
7927 hi = TREE_INT_CST_HIGH (arg);
7928 if (width < HOST_BITS_PER_DOUBLE_INT)
7929 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
7930 }
7931 else
7932 {
7933 hi = 0;
7934 if (width < HOST_BITS_PER_WIDE_INT)
7935 lo &= ~(HOST_WIDE_INT_M1U << width);
7936 }
7937
7938 switch (DECL_FUNCTION_CODE (fndecl))
7939 {
7940 CASE_INT_FN (BUILT_IN_FFS):
7941 if (lo != 0)
7942 result = ffs_hwi (lo);
7943 else if (hi != 0)
7944 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7945 else
7946 result = 0;
7947 break;
7948
7949 CASE_INT_FN (BUILT_IN_CLZ):
7950 if (hi != 0)
7951 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7952 else if (lo != 0)
7953 result = width - floor_log2 (lo) - 1;
7954 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7955 result = width;
7956 break;
7957
7958 CASE_INT_FN (BUILT_IN_CTZ):
7959 if (lo != 0)
7960 result = ctz_hwi (lo);
7961 else if (hi != 0)
7962 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7963 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7964 result = width;
7965 break;
7966
7967 CASE_INT_FN (BUILT_IN_CLRSB):
7968 if (width > 2 * HOST_BITS_PER_WIDE_INT)
7969 return NULL_TREE;
7970 if (width > HOST_BITS_PER_WIDE_INT
7971 && (hi & ((unsigned HOST_WIDE_INT) 1
7972 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7973 {
7974 hi = ~hi & ~(HOST_WIDE_INT_M1U
7975 << (width - HOST_BITS_PER_WIDE_INT - 1));
7976 lo = ~lo;
7977 }
7978 else if (width <= HOST_BITS_PER_WIDE_INT
7979 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7980 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
7981 if (hi != 0)
7982 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7983 else if (lo != 0)
7984 result = width - floor_log2 (lo) - 2;
7985 else
7986 result = width - 1;
7987 break;
7988
7989 CASE_INT_FN (BUILT_IN_POPCOUNT):
7990 result = 0;
7991 while (lo)
7992 result++, lo &= lo - 1;
7993 while (hi)
7994 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7995 break;
7996
7997 CASE_INT_FN (BUILT_IN_PARITY):
7998 result = 0;
7999 while (lo)
8000 result++, lo &= lo - 1;
8001 while (hi)
8002 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8003 result &= 1;
8004 break;
8005
8006 default:
8007 gcc_unreachable ();
8008 }
8009
8010 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8011 }
8012
8013 return NULL_TREE;
8014 }
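
/* Illustration (editorial note): the POPCOUNT and PARITY loops above use
   the classic identity that X & (X - 1) clears the lowest set bit of X,
   so each iteration retires exactly one bit.  A plain-C sketch of the
   same computation, guarded out and for exposition only:  */
#if 0
static int
popcount_sketch (unsigned long long x)
{
  int n = 0;
  while (x)
    n++, x &= x - 1;	/* Clear the lowest set bit.  */
  return n;		/* E.g. popcount_sketch (0xF0) == 4.  */
}
#endif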
8015
8016 /* Fold function call to builtin_bswap and the short, long and long long
8017 variants. Return NULL_TREE if no simplification can be made. */
8018 static tree
8019 fold_builtin_bswap (tree fndecl, tree arg)
8020 {
8021 if (! validate_arg (arg, INTEGER_TYPE))
8022 return NULL_TREE;
8023
8024 /* Optimize constant value. */
8025 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8026 {
8027 HOST_WIDE_INT hi, width, r_hi = 0;
8028 unsigned HOST_WIDE_INT lo, r_lo = 0;
8029 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8030
8031 width = TYPE_PRECISION (type);
8032 lo = TREE_INT_CST_LOW (arg);
8033 hi = TREE_INT_CST_HIGH (arg);
8034
8035 switch (DECL_FUNCTION_CODE (fndecl))
8036 {
8037 case BUILT_IN_BSWAP16:
8038 case BUILT_IN_BSWAP32:
8039 case BUILT_IN_BSWAP64:
8040 {
8041 int s;
8042
8043 for (s = 0; s < width; s += 8)
8044 {
8045 int d = width - s - 8;
8046 unsigned HOST_WIDE_INT byte;
8047
8048 if (s < HOST_BITS_PER_WIDE_INT)
8049 byte = (lo >> s) & 0xff;
8050 else
8051 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8052
8053 if (d < HOST_BITS_PER_WIDE_INT)
8054 r_lo |= byte << d;
8055 else
8056 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8057 }
8058 }
8059
8060 break;
8061
8062 default:
8063 gcc_unreachable ();
8064 }
8065
8066 if (width < HOST_BITS_PER_WIDE_INT)
8067 return build_int_cst (type, r_lo);
8068 else
8069 return build_int_cst_wide (type, r_lo, r_hi);
8070 }
8071
8072 return NULL_TREE;
8073 }
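
/* Illustration (editorial note): the byte loop above mirrors bytes about
   the center of the value, so e.g. __builtin_bswap32 (0x12345678) folds
   to the constant 0x78563412.  */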
8074
8075 /* A subroutine of fold_builtin to fold the various logarithmic
8076 functions. Return NULL_TREE if no simplification can be made.
8077 FUNC is the corresponding MPFR logarithm function. */
8078
8079 static tree
8080 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8081 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8082 {
8083 if (validate_arg (arg, REAL_TYPE))
8084 {
8085 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8086 tree res;
8087 const enum built_in_function fcode = builtin_mathfn_code (arg);
8088
8089 /* Calculate the result when the argument is a constant. */
8090 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8091 return res;
8092
8093 /* Special case, optimize logN(expN(x)) = x. */
8094 if (flag_unsafe_math_optimizations
8095 && ((func == mpfr_log
8096 && (fcode == BUILT_IN_EXP
8097 || fcode == BUILT_IN_EXPF
8098 || fcode == BUILT_IN_EXPL))
8099 || (func == mpfr_log2
8100 && (fcode == BUILT_IN_EXP2
8101 || fcode == BUILT_IN_EXP2F
8102 || fcode == BUILT_IN_EXP2L))
8103 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8104 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8105
8106 /* Optimize logN(func()) for various exponential functions. We
8107 want to determine the value "x" and the power "exponent" in
8108 order to transform logN(x**exponent) into exponent*logN(x). */
8109 if (flag_unsafe_math_optimizations)
8110 {
8111 tree exponent = 0, x = 0;
8112
8113 switch (fcode)
8114 {
8115 CASE_FLT_FN (BUILT_IN_EXP):
8116 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8117 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8118 dconst_e ()));
8119 exponent = CALL_EXPR_ARG (arg, 0);
8120 break;
8121 CASE_FLT_FN (BUILT_IN_EXP2):
8122 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8123 x = build_real (type, dconst2);
8124 exponent = CALL_EXPR_ARG (arg, 0);
8125 break;
8126 CASE_FLT_FN (BUILT_IN_EXP10):
8127 CASE_FLT_FN (BUILT_IN_POW10):
8128 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8129 {
8130 REAL_VALUE_TYPE dconst10;
8131 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8132 x = build_real (type, dconst10);
8133 }
8134 exponent = CALL_EXPR_ARG (arg, 0);
8135 break;
8136 CASE_FLT_FN (BUILT_IN_SQRT):
8137 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8138 x = CALL_EXPR_ARG (arg, 0);
8139 exponent = build_real (type, dconsthalf);
8140 break;
8141 CASE_FLT_FN (BUILT_IN_CBRT):
8142 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8143 x = CALL_EXPR_ARG (arg, 0);
8144 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8145 dconst_third ()));
8146 break;
8147 CASE_FLT_FN (BUILT_IN_POW):
8148 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8149 x = CALL_EXPR_ARG (arg, 0);
8150 exponent = CALL_EXPR_ARG (arg, 1);
8151 break;
8152 default:
8153 break;
8154 }
8155
8156 /* Now perform the optimization. */
8157 if (x && exponent)
8158 {
8159 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8160 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8161 }
8162 }
8163 }
8164
8165 return NULL_TREE;
8166 }
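
/* Illustration (editorial note): the unsafe-math rewrites above include

     log (exp (x))     -> x
     log10 (exp10 (x)) -> x
     log (pow (x, y))  -> y * log (x)
     log (sqrt (x))    -> 0.5 * log (x)
     log (cbrt (x))    -> (1.0/3.0) * log (x)

   each valid only when errno and exceptional values are ignored.  */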
8167
8168 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8169 NULL_TREE if no simplification can be made. */
8170
8171 static tree
8172 fold_builtin_hypot (location_t loc, tree fndecl,
8173 tree arg0, tree arg1, tree type)
8174 {
8175 tree res, narg0, narg1;
8176
8177 if (!validate_arg (arg0, REAL_TYPE)
8178 || !validate_arg (arg1, REAL_TYPE))
8179 return NULL_TREE;
8180
8181 /* Calculate the result when the argument is a constant. */
8182 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8183 return res;
8184
8185 /* If either argument to hypot has a negate or abs, strip that off.
8186 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8187 narg0 = fold_strip_sign_ops (arg0);
8188 narg1 = fold_strip_sign_ops (arg1);
8189 if (narg0 || narg1)
8190 {
8191 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8192 narg1 ? narg1 : arg1);
8193 }
8194
8195 /* If either argument is zero, hypot is fabs of the other. */
8196 if (real_zerop (arg0))
8197 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8198 else if (real_zerop (arg1))
8199 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8200
8201 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8202 if (flag_unsafe_math_optimizations
8203 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8204 {
8205 const REAL_VALUE_TYPE sqrt2_trunc
8206 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8207 return fold_build2_loc (loc, MULT_EXPR, type,
8208 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8209 build_real (type, sqrt2_trunc));
8210 }
8211
8212 return NULL_TREE;
8213 }
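
/* Illustration (editorial note): the folds above yield, e.g.,

     hypot (-x, fabs (y)) -> hypot (x, y)
     hypot (x, 0.0)       -> fabs (x)
     hypot (x, x)         -> fabs (x) * sqrt (2)   (unsafe-math only)  */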
8214
8215
8216 /* Fold a builtin function call to pow, powf, or powl. Return
8217 NULL_TREE if no simplification can be made. */
8218 static tree
8219 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8220 {
8221 tree res;
8222
8223 if (!validate_arg (arg0, REAL_TYPE)
8224 || !validate_arg (arg1, REAL_TYPE))
8225 return NULL_TREE;
8226
8227 /* Calculate the result when the argument is a constant. */
8228 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8229 return res;
8230
8231 /* Optimize pow(1.0,y) = 1.0. */
8232 if (real_onep (arg0))
8233 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8234
8235 if (TREE_CODE (arg1) == REAL_CST
8236 && !TREE_OVERFLOW (arg1))
8237 {
8238 REAL_VALUE_TYPE cint;
8239 REAL_VALUE_TYPE c;
8240 HOST_WIDE_INT n;
8241
8242 c = TREE_REAL_CST (arg1);
8243
8244 /* Optimize pow(x,0.0) = 1.0. */
8245 if (REAL_VALUES_EQUAL (c, dconst0))
8246 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8247 arg0);
8248
8249 /* Optimize pow(x,1.0) = x. */
8250 if (REAL_VALUES_EQUAL (c, dconst1))
8251 return arg0;
8252
8253 /* Optimize pow(x,-1.0) = 1.0/x. */
8254 if (REAL_VALUES_EQUAL (c, dconstm1))
8255 return fold_build2_loc (loc, RDIV_EXPR, type,
8256 build_real (type, dconst1), arg0);
8257
8258 /* Optimize pow(x,0.5) = sqrt(x). */
8259 if (flag_unsafe_math_optimizations
8260 && REAL_VALUES_EQUAL (c, dconsthalf))
8261 {
8262 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8263
8264 if (sqrtfn != NULL_TREE)
8265 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8266 }
8267
8268 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8269 if (flag_unsafe_math_optimizations)
8270 {
8271 const REAL_VALUE_TYPE dconstroot
8272 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8273
8274 if (REAL_VALUES_EQUAL (c, dconstroot))
8275 {
8276 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8277 if (cbrtfn != NULL_TREE)
8278 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8279 }
8280 }
8281
8282 /* Check for an integer exponent. */
8283 n = real_to_integer (&c);
8284 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8285 if (real_identical (&c, &cint))
8286 {
8287 /* Attempt to evaluate pow at compile-time, unless this should
8288 raise an exception. */
8289 if (TREE_CODE (arg0) == REAL_CST
8290 && !TREE_OVERFLOW (arg0)
8291 && (n > 0
8292 || (!flag_trapping_math && !flag_errno_math)
8293 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8294 {
8295 REAL_VALUE_TYPE x;
8296 bool inexact;
8297
8298 x = TREE_REAL_CST (arg0);
8299 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8300 if (flag_unsafe_math_optimizations || !inexact)
8301 return build_real (type, x);
8302 }
8303
8304 /* Strip sign ops from even integer powers. */
8305 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8306 {
8307 tree narg0 = fold_strip_sign_ops (arg0);
8308 if (narg0)
8309 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8310 }
8311 }
8312 }
8313
8314 if (flag_unsafe_math_optimizations)
8315 {
8316 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8317
8318 /* Optimize pow(expN(x),y) = expN(x*y). */
8319 if (BUILTIN_EXPONENT_P (fcode))
8320 {
8321 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8322 tree arg = CALL_EXPR_ARG (arg0, 0);
8323 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8324 return build_call_expr_loc (loc, expfn, 1, arg);
8325 }
8326
8327 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8328 if (BUILTIN_SQRT_P (fcode))
8329 {
8330 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8331 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8332 build_real (type, dconsthalf));
8333 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8334 }
8335
8336 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8337 if (BUILTIN_CBRT_P (fcode))
8338 {
8339 tree arg = CALL_EXPR_ARG (arg0, 0);
8340 if (tree_expr_nonnegative_p (arg))
8341 {
8342 const REAL_VALUE_TYPE dconstroot
8343 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8344 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8345 build_real (type, dconstroot));
8346 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8347 }
8348 }
8349
8350 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8351 if (fcode == BUILT_IN_POW
8352 || fcode == BUILT_IN_POWF
8353 || fcode == BUILT_IN_POWL)
8354 {
8355 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8356 if (tree_expr_nonnegative_p (arg00))
8357 {
8358 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8359 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8360 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8361 }
8362 }
8363 }
8364
8365 return NULL_TREE;
8366 }
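
/* Illustration (editorial note): among the folds above,

     pow (x, 0.0)  -> 1.0          pow (x, 1.0) -> x
     pow (x, -1.0) -> 1.0 / x      pow (x, 0.5) -> sqrt (x)  (unsafe-math)
     pow (exp (x), y)    -> exp (x * y)          (unsafe-math)
     pow (pow (x, y), z) -> pow (x, y * z)       (unsafe-math, x >= 0)

   and a constant integer exponent is evaluated outright via real_powi,
   unless doing so would hide a trap or an errno setting.  */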
8367
8368 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8369 Return NULL_TREE if no simplification can be made. */
8370 static tree
8371 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8372 tree arg0, tree arg1, tree type)
8373 {
8374 if (!validate_arg (arg0, REAL_TYPE)
8375 || !validate_arg (arg1, INTEGER_TYPE))
8376 return NULL_TREE;
8377
8378 /* Optimize pow(1.0,y) = 1.0. */
8379 if (real_onep (arg0))
8380 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8381
8382 if (host_integerp (arg1, 0))
8383 {
8384 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8385
8386 /* Evaluate powi at compile-time. */
8387 if (TREE_CODE (arg0) == REAL_CST
8388 && !TREE_OVERFLOW (arg0))
8389 {
8390 REAL_VALUE_TYPE x;
8391 x = TREE_REAL_CST (arg0);
8392 real_powi (&x, TYPE_MODE (type), &x, c);
8393 return build_real (type, x);
8394 }
8395
8396 /* Optimize pow(x,0) = 1.0. */
8397 if (c == 0)
8398 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8399 arg0);
8400
8401 /* Optimize pow(x,1) = x. */
8402 if (c == 1)
8403 return arg0;
8404
8405 /* Optimize pow(x,-1) = 1.0/x. */
8406 if (c == -1)
8407 return fold_build2_loc (loc, RDIV_EXPR, type,
8408 build_real (type, dconst1), arg0);
8409 }
8410
8411 return NULL_TREE;
8412 }
8413
8414 /* A subroutine of fold_builtin to fold the various exponent
8415 functions. Return NULL_TREE if no simplification can be made.
8416 FUNC is the corresponding MPFR exponent function. */
8417
8418 static tree
8419 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8420 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8421 {
8422 if (validate_arg (arg, REAL_TYPE))
8423 {
8424 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8425 tree res;
8426
8427 /* Calculate the result when the argument is a constant. */
8428 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8429 return res;
8430
8431 /* Optimize expN(logN(x)) = x. */
8432 if (flag_unsafe_math_optimizations)
8433 {
8434 const enum built_in_function fcode = builtin_mathfn_code (arg);
8435
8436 if ((func == mpfr_exp
8437 && (fcode == BUILT_IN_LOG
8438 || fcode == BUILT_IN_LOGF
8439 || fcode == BUILT_IN_LOGL))
8440 || (func == mpfr_exp2
8441 && (fcode == BUILT_IN_LOG2
8442 || fcode == BUILT_IN_LOG2F
8443 || fcode == BUILT_IN_LOG2L))
8444 || (func == mpfr_exp10
8445 && (fcode == BUILT_IN_LOG10
8446 || fcode == BUILT_IN_LOG10F
8447 || fcode == BUILT_IN_LOG10L)))
8448 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8449 }
8450 }
8451
8452 return NULL_TREE;
8453 }
8454
8455 /* Return true if VAR is a VAR_DECL or a component thereof. */
8456
8457 static bool
8458 var_decl_component_p (tree var)
8459 {
8460 tree inner = var;
8461 while (handled_component_p (inner))
8462 inner = TREE_OPERAND (inner, 0);
8463 return SSA_VAR_P (inner);
8464 }
8465
8466 /* Fold function call to builtin memset. Return
8467 NULL_TREE if no simplification can be made. */
8468
8469 static tree
8470 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8471 tree type, bool ignore)
8472 {
8473 tree var, ret, etype;
8474 unsigned HOST_WIDE_INT length, cval;
8475
8476 if (! validate_arg (dest, POINTER_TYPE)
8477 || ! validate_arg (c, INTEGER_TYPE)
8478 || ! validate_arg (len, INTEGER_TYPE))
8479 return NULL_TREE;
8480
8481 if (! host_integerp (len, 1))
8482 return NULL_TREE;
8483
8484 /* If the LEN parameter is zero, return DEST. */
8485 if (integer_zerop (len))
8486 return omit_one_operand_loc (loc, type, dest, c);
8487
8488 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8489 return NULL_TREE;
8490
8491 var = dest;
8492 STRIP_NOPS (var);
8493 if (TREE_CODE (var) != ADDR_EXPR)
8494 return NULL_TREE;
8495
8496 var = TREE_OPERAND (var, 0);
8497 if (TREE_THIS_VOLATILE (var))
8498 return NULL_TREE;
8499
8500 etype = TREE_TYPE (var);
8501 if (TREE_CODE (etype) == ARRAY_TYPE)
8502 etype = TREE_TYPE (etype);
8503
8504 if (!INTEGRAL_TYPE_P (etype)
8505 && !POINTER_TYPE_P (etype))
8506 return NULL_TREE;
8507
8508 if (! var_decl_component_p (var))
8509 return NULL_TREE;
8510
8511 length = tree_low_cst (len, 1);
8512 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8513 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8514 return NULL_TREE;
8515
8516 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8517 return NULL_TREE;
8518
8519 if (integer_zerop (c))
8520 cval = 0;
8521 else
8522 {
8523 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8524 return NULL_TREE;
8525
8526 cval = TREE_INT_CST_LOW (c);
8527 cval &= 0xff;
8528 cval |= cval << 8;
8529 cval |= cval << 16;
8530 cval |= (cval << 31) << 1;
8531 }
8532
8533 ret = build_int_cst_type (etype, cval);
8534 var = build_fold_indirect_ref_loc (loc,
8535 fold_convert_loc (loc,
8536 build_pointer_type (etype),
8537 dest));
8538 ret = build2 (MODIFY_EXPR, etype, var, ret);
8539 if (ignore)
8540 return ret;
8541
8542 return omit_one_operand_loc (loc, type, dest, ret);
8543 }
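
/* Illustration (editorial note): for a suitably aligned int I on a target
   with 32-bit int, memset (&i, 0xab, sizeof i) folds into a single
   four-byte store of the replicated pattern 0xabababab; the shifts above
   spread the low byte across the word.  A plain-C sketch of that
   replication, guarded out:  */
#if 0
static unsigned long long
replicate_byte_sketch (unsigned long long c)
{
  unsigned long long cval = c & 0xff;
  cval |= cval << 8;		/* 0xab -> 0xabab.  */
  cval |= cval << 16;		/* -> 0xabababab.  */
  /* The two-step shift mirrors the code above, which must not shift by
     the full width of HOST_WIDE_INT in one go.  */
  cval |= (cval << 31) << 1;	/* -> 0xabababababababab.  */
  return cval;
}
#endif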
8544
8545 /* Fold function call to builtin bzero. Return
8546 NULL_TREE if no simplification can be made. */
8547
8548 static tree
8549 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8550 {
8551 if (! validate_arg (dest, POINTER_TYPE)
8552 || ! validate_arg (size, INTEGER_TYPE))
8553 return NULL_TREE;
8554
8555 if (!ignore)
8556 return NULL_TREE;
8557
8558 /* New argument list transforming bzero(ptr x, int y) to
8559 memset(ptr x, int 0, size_t y). This is done this way
8560 so that if it isn't expanded inline, we fall back to
8561 calling bzero instead of memset. */
8562
8563 return fold_builtin_memset (loc, dest, integer_zero_node,
8564 fold_convert_loc (loc, size_type_node, size),
8565 void_type_node, ignore);
8566 }
8567
8568 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8569 NULL_TREE if no simplification can be made.
8570 If ENDP is 0, return DEST (like memcpy).
8571 If ENDP is 1, return DEST+LEN (like mempcpy).
8572 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8573 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8574 (memmove). */
8575
8576 static tree
8577 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8578 tree len, tree type, bool ignore, int endp)
8579 {
8580 tree destvar, srcvar, expr;
8581
8582 if (! validate_arg (dest, POINTER_TYPE)
8583 || ! validate_arg (src, POINTER_TYPE)
8584 || ! validate_arg (len, INTEGER_TYPE))
8585 return NULL_TREE;
8586
8587 /* If the LEN parameter is zero, return DEST. */
8588 if (integer_zerop (len))
8589 return omit_one_operand_loc (loc, type, dest, src);
8590
8591 /* If SRC and DEST are the same (and not volatile), return
8592 DEST{,+LEN,+LEN-1}. */
8593 if (operand_equal_p (src, dest, 0))
8594 expr = len;
8595 else
8596 {
8597 tree srctype, desttype;
8598 unsigned int src_align, dest_align;
8599 tree off0;
8600
8601 if (endp == 3)
8602 {
8603 src_align = get_pointer_alignment (src);
8604 dest_align = get_pointer_alignment (dest);
8605
8606 /* Both DEST and SRC must be pointer types.
8607 ??? This is what old code did. Is the testing for pointer types
8608 really mandatory?
8609
8610 If either SRC is readonly or length is 1, we can use memcpy. */
8611 if (!dest_align || !src_align)
8612 return NULL_TREE;
8613 if (readonly_data_expr (src)
8614 || (host_integerp (len, 1)
8615 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8616 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8617 {
8618 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8619 if (!fn)
8620 return NULL_TREE;
8621 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8622 }
8623
8624 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8625 if (TREE_CODE (src) == ADDR_EXPR
8626 && TREE_CODE (dest) == ADDR_EXPR)
8627 {
8628 tree src_base, dest_base, fn;
8629 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8630 HOST_WIDE_INT size = -1;
8631 HOST_WIDE_INT maxsize = -1;
8632
8633 srcvar = TREE_OPERAND (src, 0);
8634 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8635 &size, &maxsize);
8636 destvar = TREE_OPERAND (dest, 0);
8637 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8638 &size, &maxsize);
8639 if (host_integerp (len, 1))
8640 maxsize = tree_low_cst (len, 1);
8641 else
8642 maxsize = -1;
8643 src_offset /= BITS_PER_UNIT;
8644 dest_offset /= BITS_PER_UNIT;
8645 if (SSA_VAR_P (src_base)
8646 && SSA_VAR_P (dest_base))
8647 {
8648 if (operand_equal_p (src_base, dest_base, 0)
8649 && ranges_overlap_p (src_offset, maxsize,
8650 dest_offset, maxsize))
8651 return NULL_TREE;
8652 }
8653 else if (TREE_CODE (src_base) == MEM_REF
8654 && TREE_CODE (dest_base) == MEM_REF)
8655 {
8656 double_int off;
8657 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8658 TREE_OPERAND (dest_base, 0), 0))
8659 return NULL_TREE;
8660 off = mem_ref_offset (src_base) +
8661 double_int::from_shwi (src_offset);
8662 if (!off.fits_shwi ())
8663 return NULL_TREE;
8664 src_offset = off.low;
8665 off = mem_ref_offset (dest_base) +
8666 double_int::from_shwi (dest_offset);
8667 if (!off.fits_shwi ())
8668 return NULL_TREE;
8669 dest_offset = off.low;
8670 if (ranges_overlap_p (src_offset, maxsize,
8671 dest_offset, maxsize))
8672 return NULL_TREE;
8673 }
8674 else
8675 return NULL_TREE;
8676
8677 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8678 if (!fn)
8679 return NULL_TREE;
8680 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8681 }
8682
8683 /* If the destination and source do not alias, optimize into
8684 memcpy as well. */
8685 if ((is_gimple_min_invariant (dest)
8686 || TREE_CODE (dest) == SSA_NAME)
8687 && (is_gimple_min_invariant (src)
8688 || TREE_CODE (src) == SSA_NAME))
8689 {
8690 ao_ref destr, srcr;
8691 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8692 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8693 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8694 {
8695 tree fn;
8696 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8697 if (!fn)
8698 return NULL_TREE;
8699 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8700 }
8701 }
8702
8703 return NULL_TREE;
8704 }
8705
8706 if (!host_integerp (len, 0))
8707 return NULL_TREE;
8708 /* FIXME:
8709 This logic loses for arguments like (type *)malloc (sizeof (type)),
8710 since we strip the casts up to the VOID return value from malloc.
8711 Perhaps we ought to inherit the type from the non-VOID argument here? */
8712 STRIP_NOPS (src);
8713 STRIP_NOPS (dest);
8714 if (!POINTER_TYPE_P (TREE_TYPE (src))
8715 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8716 return NULL_TREE;
8717 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8718 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8719 {
8720 tree tem = TREE_OPERAND (src, 0);
8721 STRIP_NOPS (tem);
8722 if (tem != TREE_OPERAND (src, 0))
8723 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8724 }
8725 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8726 {
8727 tree tem = TREE_OPERAND (dest, 0);
8728 STRIP_NOPS (tem);
8729 if (tem != TREE_OPERAND (dest, 0))
8730 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8731 }
8732 srctype = TREE_TYPE (TREE_TYPE (src));
8733 if (TREE_CODE (srctype) == ARRAY_TYPE
8734 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8735 {
8736 srctype = TREE_TYPE (srctype);
8737 STRIP_NOPS (src);
8738 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8739 }
8740 desttype = TREE_TYPE (TREE_TYPE (dest));
8741 if (TREE_CODE (desttype) == ARRAY_TYPE
8742 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8743 {
8744 desttype = TREE_TYPE (desttype);
8745 STRIP_NOPS (dest);
8746 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8747 }
8748 if (TREE_ADDRESSABLE (srctype)
8749 || TREE_ADDRESSABLE (desttype))
8750 return NULL_TREE;
8751
8752 src_align = get_pointer_alignment (src);
8753 dest_align = get_pointer_alignment (dest);
8754 if (dest_align < TYPE_ALIGN (desttype)
8755 || src_align < TYPE_ALIGN (srctype))
8756 return NULL_TREE;
8757
8758 if (!ignore)
8759 dest = builtin_save_expr (dest);
8760
8761 /* Build accesses at offset zero with a ref-all character type. */
8762 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8763 ptr_mode, true), 0);
8764
8765 destvar = dest;
8766 STRIP_NOPS (destvar);
8767 if (TREE_CODE (destvar) == ADDR_EXPR
8768 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8769 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8770 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8771 else
8772 destvar = NULL_TREE;
8773
8774 srcvar = src;
8775 STRIP_NOPS (srcvar);
8776 if (TREE_CODE (srcvar) == ADDR_EXPR
8777 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8778 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8779 {
8780 if (!destvar
8781 || src_align >= TYPE_ALIGN (desttype))
8782 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8783 srcvar, off0);
8784 else if (!STRICT_ALIGNMENT)
8785 {
8786 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8787 src_align);
8788 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8789 }
8790 else
8791 srcvar = NULL_TREE;
8792 }
8793 else
8794 srcvar = NULL_TREE;
8795
8796 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8797 return NULL_TREE;
8798
8799 if (srcvar == NULL_TREE)
8800 {
8801 STRIP_NOPS (src);
8802 if (src_align >= TYPE_ALIGN (desttype))
8803 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8804 else
8805 {
8806 if (STRICT_ALIGNMENT)
8807 return NULL_TREE;
8808 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8809 src_align);
8810 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8811 }
8812 }
8813 else if (destvar == NULL_TREE)
8814 {
8815 STRIP_NOPS (dest);
8816 if (dest_align >= TYPE_ALIGN (srctype))
8817 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8818 else
8819 {
8820 if (STRICT_ALIGNMENT)
8821 return NULL_TREE;
8822 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8823 dest_align);
8824 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8825 }
8826 }
8827
8828 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8829 }
8830
8831 if (ignore)
8832 return expr;
8833
8834 if (endp == 0 || endp == 3)
8835 return omit_one_operand_loc (loc, type, dest, expr);
8836
8837 if (expr == len)
8838 expr = NULL_TREE;
8839
8840 if (endp == 2)
8841 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8842 ssize_int (1));
8843
8844 dest = fold_build_pointer_plus_loc (loc, dest, len);
8845 dest = fold_convert_loc (loc, type, dest);
8846 if (expr)
8847 dest = omit_one_operand_loc (loc, type, dest, expr);
8848 return dest;
8849 }
8850
8851 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8852 If LEN is not NULL, it represents the length of the string to be
8853 copied. Return NULL_TREE if no simplification can be made. */
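/* Hedged example of the transformation below (illustrative only,
   assuming the usual strcpy/memcpy semantics): when not optimizing
   for size,

     strcpy (d, "abc");

   is rewritten as

     memcpy (d, "abc", 4);   // strlen ("abc") + 1 == 4

   so the copy length becomes a compile-time constant.  */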
8854
8855 tree
8856 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8857 {
8858 tree fn;
8859
8860 if (!validate_arg (dest, POINTER_TYPE)
8861 || !validate_arg (src, POINTER_TYPE))
8862 return NULL_TREE;
8863
8864 /* If SRC and DEST are the same (and not volatile), return DEST. */
8865 if (operand_equal_p (src, dest, 0))
8866 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8867
8868 if (optimize_function_for_size_p (cfun))
8869 return NULL_TREE;
8870
8871 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8872 if (!fn)
8873 return NULL_TREE;
8874
8875 if (!len)
8876 {
8877 len = c_strlen (src, 1);
8878 if (! len || TREE_SIDE_EFFECTS (len))
8879 return NULL_TREE;
8880 }
8881
8882 len = fold_convert_loc (loc, size_type_node, len);
8883 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8884 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8885 build_call_expr_loc (loc, fn, 3, dest, src, len));
8886 }
8887
8888 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8889 Return NULL_TREE if no simplification can be made. */
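/* Illustrative sketch (added commentary): with a constant source
   string the fold below turns

     p = stpcpy (d, "abc");

   into roughly

     (memcpy (d, "abc", 4), d + 3)   // d + strlen ("abc")

   DEST is wrapped in a SAVE_EXPR because it is used twice.  */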
8890
8891 static tree
8892 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8893 {
8894 tree fn, len, lenp1, call, type;
8895
8896 if (!validate_arg (dest, POINTER_TYPE)
8897 || !validate_arg (src, POINTER_TYPE))
8898 return NULL_TREE;
8899
8900 len = c_strlen (src, 1);
8901 if (!len
8902 || TREE_CODE (len) != INTEGER_CST)
8903 return NULL_TREE;
8904
8905 if (optimize_function_for_size_p (cfun)
8906 /* If length is zero it's small enough. */
8907 && !integer_zerop (len))
8908 return NULL_TREE;
8909
8910 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8911 if (!fn)
8912 return NULL_TREE;
8913
8914 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8915 fold_convert_loc (loc, size_type_node, len),
8916 build_int_cst (size_type_node, 1));
8917 /* We use dest twice in building our expression. Save it from
8918 multiple expansions. */
8919 dest = builtin_save_expr (dest);
8920 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8921
8922 type = TREE_TYPE (TREE_TYPE (fndecl));
8923 dest = fold_build_pointer_plus_loc (loc, dest, len);
8924 dest = fold_convert_loc (loc, type, dest);
8925 dest = omit_one_operand_loc (loc, type, dest, call);
8926 return dest;
8927 }
8928
8929 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8930 If SLEN is not NULL, it represents the length of the source string.
8931 Return NULL_TREE if no simplification can be made. */
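/* Hedged example of the fold below, assuming constant operands with
   LEN <= strlen (SRC) + 1:

     strncpy (d, "abc", 4);

   becomes

     memcpy (d, "abc", 4);

   Larger LEN values are not folded here, since strncpy would then
   have to zero-pad the destination.  */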
8932
8933 tree
8934 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8935 tree src, tree len, tree slen)
8936 {
8937 tree fn;
8938
8939 if (!validate_arg (dest, POINTER_TYPE)
8940 || !validate_arg (src, POINTER_TYPE)
8941 || !validate_arg (len, INTEGER_TYPE))
8942 return NULL_TREE;
8943
8944 /* If the LEN parameter is zero, return DEST. */
8945 if (integer_zerop (len))
8946 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8947
8948 /* We can't compare slen with len as constants below if len is not a
8949 constant. */
8950 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8951 return NULL_TREE;
8952
8953 if (!slen)
8954 slen = c_strlen (src, 1);
8955
8956 /* Now, we must be passed a constant src ptr parameter. */
8957 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8958 return NULL_TREE;
8959
8960 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8961
8962 /* We do not support simplification of this case, though we do
8963 support it when expanding trees into RTL. */
8964 /* FIXME: generate a call to __builtin_memset. */
8965 if (tree_int_cst_lt (slen, len))
8966 return NULL_TREE;
8967
8968 /* OK, transform into builtin memcpy. */
8969 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8970 if (!fn)
8971 return NULL_TREE;
8972
8973 len = fold_convert_loc (loc, size_type_node, len);
8974 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8975 build_call_expr_loc (loc, fn, 3, dest, src, len));
8976 }
8977
8978 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8979 arguments to the call, and TYPE is its return type.
8980 Return NULL_TREE if no simplification can be made. */
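/* Illustrative compile-time evaluations performed below (assuming host
   and target agree on the character set):

     memchr ("hello", 'l', 6)   folds to &"hello"[2]
     memchr ("hello", 'z', 6)   folds to a null pointer.  */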
8981
8982 static tree
8983 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8984 {
8985 if (!validate_arg (arg1, POINTER_TYPE)
8986 || !validate_arg (arg2, INTEGER_TYPE)
8987 || !validate_arg (len, INTEGER_TYPE))
8988 return NULL_TREE;
8989 else
8990 {
8991 const char *p1;
8992
8993 if (TREE_CODE (arg2) != INTEGER_CST
8994 || !host_integerp (len, 1))
8995 return NULL_TREE;
8996
8997 p1 = c_getstr (arg1);
8998 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8999 {
9000 char c;
9001 const char *r;
9002 tree tem;
9003
9004 if (target_char_cast (arg2, &c))
9005 return NULL_TREE;
9006
9007 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9008
9009 if (r == NULL)
9010 return build_int_cst (TREE_TYPE (arg1), 0);
9011
9012 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9013 return fold_convert_loc (loc, type, tem);
9014 }
9015 return NULL_TREE;
9016 }
9017 }
9018
9019 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9020 Return NULL_TREE if no simplification can be made. */
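/* For example (hedged, illustrative only):

     memcmp (p, p, n)          folds to 0
     memcmp ("abc", "abd", 3)  folds to -1 at compile time
     memcmp (p, q, 1)          folds to
       *(const unsigned char *) p - *(const unsigned char *) q.  */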
9021
9022 static tree
9023 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9024 {
9025 const char *p1, *p2;
9026
9027 if (!validate_arg (arg1, POINTER_TYPE)
9028 || !validate_arg (arg2, POINTER_TYPE)
9029 || !validate_arg (len, INTEGER_TYPE))
9030 return NULL_TREE;
9031
9032 /* If the LEN parameter is zero, return zero. */
9033 if (integer_zerop (len))
9034 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9035 arg1, arg2);
9036
9037 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9038 if (operand_equal_p (arg1, arg2, 0))
9039 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9040
9041 p1 = c_getstr (arg1);
9042 p2 = c_getstr (arg2);
9043
9044 /* If all arguments are constant, and the value of len is not greater
9045 than the lengths of arg1 and arg2, evaluate at compile-time. */
9046 if (host_integerp (len, 1) && p1 && p2
9047 && compare_tree_int (len, strlen (p1) + 1) <= 0
9048 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9049 {
9050 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9051
9052 if (r > 0)
9053 return integer_one_node;
9054 else if (r < 0)
9055 return integer_minus_one_node;
9056 else
9057 return integer_zero_node;
9058 }
9059
9060 /* If the LEN parameter is one, return an expression corresponding to
9061 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9062 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9063 {
9064 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9065 tree cst_uchar_ptr_node
9066 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9067
9068 tree ind1
9069 = fold_convert_loc (loc, integer_type_node,
9070 build1 (INDIRECT_REF, cst_uchar_node,
9071 fold_convert_loc (loc,
9072 cst_uchar_ptr_node,
9073 arg1)));
9074 tree ind2
9075 = fold_convert_loc (loc, integer_type_node,
9076 build1 (INDIRECT_REF, cst_uchar_node,
9077 fold_convert_loc (loc,
9078 cst_uchar_ptr_node,
9079 arg2)));
9080 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9081 }
9082
9083 return NULL_TREE;
9084 }
9085
9086 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9087 Return NULL_TREE if no simplification can be made. */
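/* Hedged sketch of the folds below, assuming ordinary strcmp semantics:

     strcmp (s, s)      folds to 0
     strcmp ("a", "b")  folds to -1 at compile time
     strcmp (s, "")     folds to *(const unsigned char *) s.  */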
9088
9089 static tree
9090 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9091 {
9092 const char *p1, *p2;
9093
9094 if (!validate_arg (arg1, POINTER_TYPE)
9095 || !validate_arg (arg2, POINTER_TYPE))
9096 return NULL_TREE;
9097
9098 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9099 if (operand_equal_p (arg1, arg2, 0))
9100 return integer_zero_node;
9101
9102 p1 = c_getstr (arg1);
9103 p2 = c_getstr (arg2);
9104
9105 if (p1 && p2)
9106 {
9107 const int i = strcmp (p1, p2);
9108 if (i < 0)
9109 return integer_minus_one_node;
9110 else if (i > 0)
9111 return integer_one_node;
9112 else
9113 return integer_zero_node;
9114 }
9115
9116 /* If the second arg is "", return *(const unsigned char*)arg1. */
9117 if (p2 && *p2 == '\0')
9118 {
9119 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9120 tree cst_uchar_ptr_node
9121 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9122
9123 return fold_convert_loc (loc, integer_type_node,
9124 build1 (INDIRECT_REF, cst_uchar_node,
9125 fold_convert_loc (loc,
9126 cst_uchar_ptr_node,
9127 arg1)));
9128 }
9129
9130 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9131 if (p1 && *p1 == '\0')
9132 {
9133 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9134 tree cst_uchar_ptr_node
9135 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9136
9137 tree temp
9138 = fold_convert_loc (loc, integer_type_node,
9139 build1 (INDIRECT_REF, cst_uchar_node,
9140 fold_convert_loc (loc,
9141 cst_uchar_ptr_node,
9142 arg2)));
9143 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9144 }
9145
9146 return NULL_TREE;
9147 }
9148
9149 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9150 Return NULL_TREE if no simplification can be made. */
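/* Hedged examples of the folds below (N a positive constant):

     strncmp (s, t, 0)   folds to 0
     strncmp (s, "", N)  folds to *(const unsigned char *) s
     strncmp (s, t, 1)   folds to
       *(const unsigned char *) s - *(const unsigned char *) t.  */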
9151
9152 static tree
9153 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9154 {
9155 const char *p1, *p2;
9156
9157 if (!validate_arg (arg1, POINTER_TYPE)
9158 || !validate_arg (arg2, POINTER_TYPE)
9159 || !validate_arg (len, INTEGER_TYPE))
9160 return NULL_TREE;
9161
9162 /* If the LEN parameter is zero, return zero. */
9163 if (integer_zerop (len))
9164 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9165 arg1, arg2);
9166
9167 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9168 if (operand_equal_p (arg1, arg2, 0))
9169 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9170
9171 p1 = c_getstr (arg1);
9172 p2 = c_getstr (arg2);
9173
9174 if (host_integerp (len, 1) && p1 && p2)
9175 {
9176 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9177 if (i > 0)
9178 return integer_one_node;
9179 else if (i < 0)
9180 return integer_minus_one_node;
9181 else
9182 return integer_zero_node;
9183 }
9184
9185 /* If the second arg is "", and the length is greater than zero,
9186 return *(const unsigned char*)arg1. */
9187 if (p2 && *p2 == '\0'
9188 && TREE_CODE (len) == INTEGER_CST
9189 && tree_int_cst_sgn (len) == 1)
9190 {
9191 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9192 tree cst_uchar_ptr_node
9193 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9194
9195 return fold_convert_loc (loc, integer_type_node,
9196 build1 (INDIRECT_REF, cst_uchar_node,
9197 fold_convert_loc (loc,
9198 cst_uchar_ptr_node,
9199 arg1)));
9200 }
9201
9202 /* If the first arg is "", and the length is greater than zero,
9203 return -*(const unsigned char*)arg2. */
9204 if (p1 && *p1 == '\0'
9205 && TREE_CODE (len) == INTEGER_CST
9206 && tree_int_cst_sgn (len) == 1)
9207 {
9208 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9209 tree cst_uchar_ptr_node
9210 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9211
9212 tree temp = fold_convert_loc (loc, integer_type_node,
9213 build1 (INDIRECT_REF, cst_uchar_node,
9214 fold_convert_loc (loc,
9215 cst_uchar_ptr_node,
9216 arg2)));
9217 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9218 }
9219
9220 /* If the LEN parameter is one, return an expression corresponding to
9221 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9222 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9223 {
9224 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9225 tree cst_uchar_ptr_node
9226 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9227
9228 tree ind1 = fold_convert_loc (loc, integer_type_node,
9229 build1 (INDIRECT_REF, cst_uchar_node,
9230 fold_convert_loc (loc,
9231 cst_uchar_ptr_node,
9232 arg1)));
9233 tree ind2 = fold_convert_loc (loc, integer_type_node,
9234 build1 (INDIRECT_REF, cst_uchar_node,
9235 fold_convert_loc (loc,
9236 cst_uchar_ptr_node,
9237 arg2)));
9238 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9239 }
9240
9241 return NULL_TREE;
9242 }
9243
9244 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9245 ARG. Return NULL_TREE if no simplification can be made. */
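/* Illustrative cases handled below (added commentary):

     signbit (-3.0)     folds to 1         // negative constant
     signbit (fabs (x)) folds to 0         // known non-negative
     signbit (x)        folds to x < 0.0   // format lacks signed zeros.  */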
9246
9247 static tree
9248 fold_builtin_signbit (location_t loc, tree arg, tree type)
9249 {
9250 if (!validate_arg (arg, REAL_TYPE))
9251 return NULL_TREE;
9252
9253 /* If ARG is a compile-time constant, determine the result. */
9254 if (TREE_CODE (arg) == REAL_CST
9255 && !TREE_OVERFLOW (arg))
9256 {
9257 REAL_VALUE_TYPE c;
9258
9259 c = TREE_REAL_CST (arg);
9260 return (REAL_VALUE_NEGATIVE (c)
9261 ? build_one_cst (type)
9262 : build_zero_cst (type));
9263 }
9264
9265 /* If ARG is non-negative, the result is always zero. */
9266 if (tree_expr_nonnegative_p (arg))
9267 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9268
9269 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9270 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9271 return fold_convert (type,
9272 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9273 build_real (TREE_TYPE (arg), dconst0)));
9274
9275 return NULL_TREE;
9276 }
9277
9278 /* Fold function call to builtin copysign, copysignf or copysignl with
9279 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9280 be made. */
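/* Hedged examples of the folds below:

     copysign (x, x)      folds to x
     copysign (-2., 3.)   folds to 2.0       // both constant
     copysign (x, 4.0)    folds to fabs (x)  // Y non-negative.  */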
9281
9282 static tree
9283 fold_builtin_copysign (location_t loc, tree fndecl,
9284 tree arg1, tree arg2, tree type)
9285 {
9286 tree tem;
9287
9288 if (!validate_arg (arg1, REAL_TYPE)
9289 || !validate_arg (arg2, REAL_TYPE))
9290 return NULL_TREE;
9291
9292 /* copysign(X,X) is X. */
9293 if (operand_equal_p (arg1, arg2, 0))
9294 return fold_convert_loc (loc, type, arg1);
9295
9296 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9297 if (TREE_CODE (arg1) == REAL_CST
9298 && TREE_CODE (arg2) == REAL_CST
9299 && !TREE_OVERFLOW (arg1)
9300 && !TREE_OVERFLOW (arg2))
9301 {
9302 REAL_VALUE_TYPE c1, c2;
9303
9304 c1 = TREE_REAL_CST (arg1);
9305 c2 = TREE_REAL_CST (arg2);
9306 /* c1.sign := c2.sign. */
9307 real_copysign (&c1, &c2);
9308 return build_real (type, c1);
9309 }
9310
9311 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9312 Remember to evaluate Y for side-effects. */
9313 if (tree_expr_nonnegative_p (arg2))
9314 return omit_one_operand_loc (loc, type,
9315 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9316 arg2);
9317
9318 /* Strip sign changing operations for the first argument. */
9319 tem = fold_strip_sign_ops (arg1);
9320 if (tem)
9321 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9322
9323 return NULL_TREE;
9324 }
9325
9326 /* Fold a call to builtin isascii with argument ARG. */
9327
9328 static tree
9329 fold_builtin_isascii (location_t loc, tree arg)
9330 {
9331 if (!validate_arg (arg, INTEGER_TYPE))
9332 return NULL_TREE;
9333 else
9334 {
9335 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9336 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9337 build_int_cst (integer_type_node,
9338 ~ (unsigned HOST_WIDE_INT) 0x7f));
9339 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9340 arg, integer_zero_node);
9341 }
9342 }
9343
9344 /* Fold a call to builtin toascii with argument ARG. */
9345
9346 static tree
9347 fold_builtin_toascii (location_t loc, tree arg)
9348 {
9349 if (!validate_arg (arg, INTEGER_TYPE))
9350 return NULL_TREE;
9351
9352 /* Transform toascii(c) -> (c & 0x7f). */
9353 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9354 build_int_cst (integer_type_node, 0x7f));
9355 }
9356
9357 /* Fold a call to builtin isdigit with argument ARG. */
9358
9359 static tree
9360 fold_builtin_isdigit (location_t loc, tree arg)
9361 {
9362 if (!validate_arg (arg, INTEGER_TYPE))
9363 return NULL_TREE;
9364 else
9365 {
9366 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9367 /* According to the C standard, isdigit is unaffected by locale.
9368 However, it definitely is affected by the target character set. */
9369 unsigned HOST_WIDE_INT target_digit0
9370 = lang_hooks.to_target_charset ('0');
9371
9372 if (target_digit0 == 0)
9373 return NULL_TREE;
9374
9375 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9376 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9377 build_int_cst (unsigned_type_node, target_digit0));
9378 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9379 build_int_cst (unsigned_type_node, 9));
9380 }
9381 }
9382
9383 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9384
9385 static tree
9386 fold_builtin_fabs (location_t loc, tree arg, tree type)
9387 {
9388 if (!validate_arg (arg, REAL_TYPE))
9389 return NULL_TREE;
9390
9391 arg = fold_convert_loc (loc, type, arg);
9392 if (TREE_CODE (arg) == REAL_CST)
9393 return fold_abs_const (arg, type);
9394 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9395 }
9396
9397 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9398
9399 static tree
9400 fold_builtin_abs (location_t loc, tree arg, tree type)
9401 {
9402 if (!validate_arg (arg, INTEGER_TYPE))
9403 return NULL_TREE;
9404
9405 arg = fold_convert_loc (loc, type, arg);
9406 if (TREE_CODE (arg) == INTEGER_CST)
9407 return fold_abs_const (arg, type);
9408 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9409 }
9410
9411 /* Fold a fma operation with arguments ARG[012]. */
9412
9413 tree
9414 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9415 tree type, tree arg0, tree arg1, tree arg2)
9416 {
9417 if (TREE_CODE (arg0) == REAL_CST
9418 && TREE_CODE (arg1) == REAL_CST
9419 && TREE_CODE (arg2) == REAL_CST)
9420 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9421
9422 return NULL_TREE;
9423 }
9424
9425 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9426
9427 static tree
9428 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9429 {
9430 if (validate_arg (arg0, REAL_TYPE)
9431 && validate_arg (arg1, REAL_TYPE)
9432 && validate_arg (arg2, REAL_TYPE))
9433 {
9434 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9435 if (tem)
9436 return tem;
9437
9438 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9439 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9440 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9441 }
9442 return NULL_TREE;
9443 }
9444
9445 /* Fold a call to builtin fmin or fmax. */
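/* Hedged sketch of the NaN handling below (C99 fmin/fmax semantics):

     fmax (x, __builtin_nan (""))  folds to x   // quiet NaN dropped
     fmax (x, x)                   folds to x
     fmax (a, b) becomes MAX_EXPR <a, b> only under -ffinite-math-only,
     since MAX_EXPR does not honor NaNs.  */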
9446
9447 static tree
9448 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9449 tree type, bool max)
9450 {
9451 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9452 {
9453 /* Calculate the result when the argument is a constant. */
9454 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9455
9456 if (res)
9457 return res;
9458
9459 /* If either argument is NaN, return the other one. Avoid the
9460 transformation if we get (and honor) a signalling NaN. Using
9461 omit_one_operand() ensures we create a non-lvalue. */
9462 if (TREE_CODE (arg0) == REAL_CST
9463 && real_isnan (&TREE_REAL_CST (arg0))
9464 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9465 || ! TREE_REAL_CST (arg0).signalling))
9466 return omit_one_operand_loc (loc, type, arg1, arg0);
9467 if (TREE_CODE (arg1) == REAL_CST
9468 && real_isnan (&TREE_REAL_CST (arg1))
9469 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9470 || ! TREE_REAL_CST (arg1).signalling))
9471 return omit_one_operand_loc (loc, type, arg0, arg1);
9472
9473 /* Transform fmin/fmax(x,x) -> x. */
9474 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9475 return omit_one_operand_loc (loc, type, arg0, arg1);
9476
9477 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9478 functions to return the numeric arg if the other one is NaN.
9479 These tree codes don't honor that, so only transform if
9480 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9481 handled, so we don't have to worry about it either. */
9482 if (flag_finite_math_only)
9483 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9484 fold_convert_loc (loc, type, arg0),
9485 fold_convert_loc (loc, type, arg1));
9486 }
9487 return NULL_TREE;
9488 }
9489
9490 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9491
9492 static tree
9493 fold_builtin_carg (location_t loc, tree arg, tree type)
9494 {
9495 if (validate_arg (arg, COMPLEX_TYPE)
9496 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9497 {
9498 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9499
9500 if (atan2_fn)
9501 {
9502 tree new_arg = builtin_save_expr (arg);
9503 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9504 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9505 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9506 }
9507 }
9508
9509 return NULL_TREE;
9510 }
9511
9512 /* Fold a call to builtin logb/ilogb. */
9513
9514 static tree
9515 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9516 {
9517 if (! validate_arg (arg, REAL_TYPE))
9518 return NULL_TREE;
9519
9520 STRIP_NOPS (arg);
9521
9522 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9523 {
9524 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9525
9526 switch (value->cl)
9527 {
9528 case rvc_nan:
9529 case rvc_inf:
9530 /* If arg is Inf or NaN and we're logb, return it. */
9531 if (TREE_CODE (rettype) == REAL_TYPE)
9532 {
9533 /* For logb(-Inf) we have to return +Inf. */
9534 if (real_isinf (value) && real_isneg (value))
9535 {
9536 REAL_VALUE_TYPE tem;
9537 real_inf (&tem);
9538 return build_real (rettype, tem);
9539 }
9540 return fold_convert_loc (loc, rettype, arg);
9541 }
9542 /* Fall through... */
9543 case rvc_zero:
9544 /* Zero may set errno and/or raise an exception for logb; also,
9545 for ilogb we don't know FP_ILOGB0. */
9546 return NULL_TREE;
9547 case rvc_normal:
9548 /* For normal numbers, proceed iff radix == 2. In GCC,
9549 normalized significands are in the range [0.5, 1.0). We
9550 want the exponent as if they were [1.0, 2.0) so get the
9551 exponent and subtract 1. */
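/* Worked example (illustrative): 8.0 is stored as 0.5 * 2**4,
   so REAL_EXP is 4 and logb (8.0) folds to 4 - 1 == 3.  */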
9552 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9553 return fold_convert_loc (loc, rettype,
9554 build_int_cst (integer_type_node,
9555 REAL_EXP (value)-1));
9556 break;
9557 }
9558 }
9559
9560 return NULL_TREE;
9561 }
9562
9563 /* Fold a call to builtin significand, if radix == 2. */
9564
9565 static tree
9566 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9567 {
9568 if (! validate_arg (arg, REAL_TYPE))
9569 return NULL_TREE;
9570
9571 STRIP_NOPS (arg);
9572
9573 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9574 {
9575 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9576
9577 switch (value->cl)
9578 {
9579 case rvc_zero:
9580 case rvc_nan:
9581 case rvc_inf:
9582 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9583 return fold_convert_loc (loc, rettype, arg);
9584 case rvc_normal:
9585 /* For normal numbers, proceed iff radix == 2. */
9586 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9587 {
9588 REAL_VALUE_TYPE result = *value;
9589 /* In GCC, normalized significands are in the range [0.5,
9590 1.0). We want them to be [1.0, 2.0) so set the
9591 exponent to 1. */
9592 SET_REAL_EXP (&result, 1);
9593 return build_real (rettype, result);
9594 }
9595 break;
9596 }
9597 }
9598
9599 return NULL_TREE;
9600 }
9601
9602 /* Fold a call to builtin frexp; we can assume the base is 2. */
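/* Hedged example of the constant fold below: for a constant argument

     f = frexp (8.0, &e);

   folds to the pair (*&e = 4, 0.5), since 8.0 == 0.5 * 2**4, which
   matches GCC's internal normalization directly.  */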
9603
9604 static tree
9605 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9606 {
9607 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9608 return NULL_TREE;
9609
9610 STRIP_NOPS (arg0);
9611
9612 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9613 return NULL_TREE;
9614
9615 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9616
9617 /* Proceed if a valid pointer type was passed in. */
9618 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9619 {
9620 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9621 tree frac, exp;
9622
9623 switch (value->cl)
9624 {
9625 case rvc_zero:
9626 /* For +-0, return (*exp = 0, +-0). */
9627 exp = integer_zero_node;
9628 frac = arg0;
9629 break;
9630 case rvc_nan:
9631 case rvc_inf:
9632 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9633 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9634 case rvc_normal:
9635 {
9636 /* Since the frexp function always expects base 2, and in
9637 GCC normalized significands are already in the range
9638 [0.5, 1.0), we have exactly what frexp wants. */
9639 REAL_VALUE_TYPE frac_rvt = *value;
9640 SET_REAL_EXP (&frac_rvt, 0);
9641 frac = build_real (rettype, frac_rvt);
9642 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9643 }
9644 break;
9645 default:
9646 gcc_unreachable ();
9647 }
9648
9649 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9650 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9651 TREE_SIDE_EFFECTS (arg1) = 1;
9652 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9653 }
9654
9655 return NULL_TREE;
9656 }
9657
9658 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9659 then we can assume the base is two. If it's false, then we have to
9660 check the mode of the TYPE parameter in certain cases. */
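/* Illustrative constant fold performed below (assuming a radix-2 type
   and no overflow):

     ldexp (1.5, 3)  folds to 12.0   // 1.5 * 2**3

   Adjustments outside +-max_exp_adj are deliberately left unfolded.  */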
9661
9662 static tree
9663 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9664 tree type, bool ldexp)
9665 {
9666 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9667 {
9668 STRIP_NOPS (arg0);
9669 STRIP_NOPS (arg1);
9670
9671 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9672 if (real_zerop (arg0) || integer_zerop (arg1)
9673 || (TREE_CODE (arg0) == REAL_CST
9674 && !real_isfinite (&TREE_REAL_CST (arg0))))
9675 return omit_one_operand_loc (loc, type, arg0, arg1);
9676
9677 /* If both arguments are constant, then try to evaluate it. */
9678 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9679 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9680 && host_integerp (arg1, 0))
9681 {
9682 /* Bound the maximum adjustment to twice the range of the
9683 mode's valid exponents. Use abs to ensure the range is
9684 positive as a sanity check. */
9685 const long max_exp_adj = 2 *
9686 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9687 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9688
9689 /* Get the user-requested adjustment. */
9690 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9691
9692 /* The requested adjustment must be inside this range. This
9693 is a preliminary cap to avoid things like overflow; we
9694 may still fail to compute the result for other reasons. */
9695 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9696 {
9697 REAL_VALUE_TYPE initial_result;
9698
9699 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9700
9701 /* Ensure we didn't overflow. */
9702 if (! real_isinf (&initial_result))
9703 {
9704 const REAL_VALUE_TYPE trunc_result
9705 = real_value_truncate (TYPE_MODE (type), initial_result);
9706
9707 /* Only proceed if the target mode can hold the
9708 resulting value. */
9709 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9710 return build_real (type, trunc_result);
9711 }
9712 }
9713 }
9714 }
9715
9716 return NULL_TREE;
9717 }
9718
9719 /* Fold a call to builtin modf. */
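/* Hedged example of the constant fold below:

     f = modf (2.5, &i);

   folds to (*&i = 2.0, 0.5); for a negative integral input such as
   -2.0 the fractional part is -0.0, preserving the sign of zero.  */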
9720
9721 static tree
9722 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9723 {
9724 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9725 return NULL_TREE;
9726
9727 STRIP_NOPS (arg0);
9728
9729 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9730 return NULL_TREE;
9731
9732 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9733
9734 /* Proceed if a valid pointer type was passed in. */
9735 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9736 {
9737 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9738 REAL_VALUE_TYPE trunc, frac;
9739
9740 switch (value->cl)
9741 {
9742 case rvc_nan:
9743 case rvc_zero:
9744 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9745 trunc = frac = *value;
9746 break;
9747 case rvc_inf:
9748 /* For +-Inf, return (*arg1 = arg0, +-0). */
9749 frac = dconst0;
9750 frac.sign = value->sign;
9751 trunc = *value;
9752 break;
9753 case rvc_normal:
9754 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9755 real_trunc (&trunc, VOIDmode, value);
9756 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9757 /* If the original number was negative and already
9758 integral, then the fractional part is -0.0. */
9759 if (value->sign && frac.cl == rvc_zero)
9760 frac.sign = value->sign;
9761 break;
9762 }
9763
9764 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9765 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9766 build_real (rettype, trunc));
9767 TREE_SIDE_EFFECTS (arg1) = 1;
9768 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9769 build_real (rettype, frac));
9770 }
9771
9772 return NULL_TREE;
9773 }
9774
9775 /* Given a location LOC, an interclass builtin function decl FNDECL
9776 and its single argument ARG, return a folded expression computing
9777 the same, or NULL_TREE if we either couldn't or didn't want to fold
9778 (the latter happens if there's an RTL instruction available). */
9779
9780 static tree
9781 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9782 {
9783 enum machine_mode mode;
9784
9785 if (!validate_arg (arg, REAL_TYPE))
9786 return NULL_TREE;
9787
9788 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9789 return NULL_TREE;
9790
9791 mode = TYPE_MODE (TREE_TYPE (arg));
9792
9793 /* If there is no optab, try generic code. */
9794 switch (DECL_FUNCTION_CODE (fndecl))
9795 {
9796 tree result;
9797
9798 CASE_FLT_FN (BUILT_IN_ISINF):
9799 {
9800 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9801 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9802 tree const type = TREE_TYPE (arg);
9803 REAL_VALUE_TYPE r;
9804 char buf[128];
9805
9806 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9807 real_from_string (&r, buf);
9808 result = build_call_expr (isgr_fn, 2,
9809 fold_build1_loc (loc, ABS_EXPR, type, arg),
9810 build_real (type, r));
9811 return result;
9812 }
9813 CASE_FLT_FN (BUILT_IN_FINITE):
9814 case BUILT_IN_ISFINITE:
9815 {
9816 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9817 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9818 tree const type = TREE_TYPE (arg);
9819 REAL_VALUE_TYPE r;
9820 char buf[128];
9821
9822 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9823 real_from_string (&r, buf);
9824 result = build_call_expr (isle_fn, 2,
9825 fold_build1_loc (loc, ABS_EXPR, type, arg),
9826 build_real (type, r));
9827 /*result = fold_build2_loc (loc, UNGT_EXPR,
9828 TREE_TYPE (TREE_TYPE (fndecl)),
9829 fold_build1_loc (loc, ABS_EXPR, type, arg),
9830 build_real (type, r));
9831 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9832 TREE_TYPE (TREE_TYPE (fndecl)),
9833 result);*/
9834 return result;
9835 }
9836 case BUILT_IN_ISNORMAL:
9837 {
9838 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9839 islessequal(fabs(x),DBL_MAX). */
9840 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9841 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9842 tree const type = TREE_TYPE (arg);
9843 REAL_VALUE_TYPE rmax, rmin;
9844 char buf[128];
9845
9846 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9847 real_from_string (&rmax, buf);
9848 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9849 real_from_string (&rmin, buf);
9850 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9851 result = build_call_expr (isle_fn, 2, arg,
9852 build_real (type, rmax));
9853 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9854 build_call_expr (isge_fn, 2, arg,
9855 build_real (type, rmin)));
9856 return result;
9857 }
9858 default:
9859 break;
9860 }
9861
9862 return NULL_TREE;
9863 }
9864
9865 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9866 ARG is the argument for the call. */
9867
9868 static tree
9869 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9870 {
9871 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9872 REAL_VALUE_TYPE r;
9873
9874 if (!validate_arg (arg, REAL_TYPE))
9875 return NULL_TREE;
9876
9877 switch (builtin_index)
9878 {
9879 case BUILT_IN_ISINF:
9880 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9881 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9882
9883 if (TREE_CODE (arg) == REAL_CST)
9884 {
9885 r = TREE_REAL_CST (arg);
9886 if (real_isinf (&r))
9887 return real_compare (GT_EXPR, &r, &dconst0)
9888 ? integer_one_node : integer_minus_one_node;
9889 else
9890 return integer_zero_node;
9891 }
9892
9893 return NULL_TREE;
9894
9895 case BUILT_IN_ISINF_SIGN:
9896 {
9897 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9898 /* In a boolean context, GCC will fold the inner COND_EXPR to
9899 1. So e.g. "if (isinf_sign(x))" would be folded to just
9900 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9901 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9902 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9903 tree tmp = NULL_TREE;
9904
9905 arg = builtin_save_expr (arg);
9906
9907 if (signbit_fn && isinf_fn)
9908 {
9909 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9910 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9911
9912 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9913 signbit_call, integer_zero_node);
9914 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9915 isinf_call, integer_zero_node);
9916
9917 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9918 integer_minus_one_node, integer_one_node);
9919 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9920 isinf_call, tmp,
9921 integer_zero_node);
9922 }
9923
9924 return tmp;
9925 }
9926
9927 case BUILT_IN_ISFINITE:
9928 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9929 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9930 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9931
9932 if (TREE_CODE (arg) == REAL_CST)
9933 {
9934 r = TREE_REAL_CST (arg);
9935 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9936 }
9937
9938 return NULL_TREE;
9939
9940 case BUILT_IN_ISNAN:
9941 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9942 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9943
9944 if (TREE_CODE (arg) == REAL_CST)
9945 {
9946 r = TREE_REAL_CST (arg);
9947 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9948 }
9949
9950 arg = builtin_save_expr (arg);
9951 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9952
9953 default:
9954 gcc_unreachable ();
9955 }
9956 }
9957
9958 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9959 This builtin will generate code to return the appropriate floating
9960 point classification depending on the value of the floating point
9961 number passed in. The possible return values must be supplied as
9962 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9963 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9964 one floating point argument, which is "type generic". */
9965
9966 static tree
9967 fold_builtin_fpclassify (location_t loc, tree exp)
9968 {
9969 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9970 arg, type, res, tmp;
9971 enum machine_mode mode;
9972 REAL_VALUE_TYPE r;
9973 char buf[128];
9974
9975 /* Verify the required arguments in the original call. */
9976 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9977 INTEGER_TYPE, INTEGER_TYPE,
9978 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9979 return NULL_TREE;
9980
9981 fp_nan = CALL_EXPR_ARG (exp, 0);
9982 fp_infinite = CALL_EXPR_ARG (exp, 1);
9983 fp_normal = CALL_EXPR_ARG (exp, 2);
9984 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9985 fp_zero = CALL_EXPR_ARG (exp, 4);
9986 arg = CALL_EXPR_ARG (exp, 5);
9987 type = TREE_TYPE (arg);
9988 mode = TYPE_MODE (type);
9989 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9990
9991 /* fpclassify(x) ->
9992 isnan(x) ? FP_NAN :
9993 (fabs(x) == Inf ? FP_INFINITE :
9994 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9995 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9996
9997 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9998 build_real (type, dconst0));
9999 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10000 tmp, fp_zero, fp_subnormal);
10001
10002 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10003 real_from_string (&r, buf);
10004 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10005 arg, build_real (type, r));
10006 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10007
10008 if (HONOR_INFINITIES (mode))
10009 {
10010 real_inf (&r);
10011 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10012 build_real (type, r));
10013 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10014 fp_infinite, res);
10015 }
10016
10017 if (HONOR_NANS (mode))
10018 {
10019 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10020 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10021 }
10022
10023 return res;
10024 }
10025
10026 /* Fold a call to an unordered comparison function such as
10027 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10028 being called and ARG0 and ARG1 are the arguments for the call.
10029 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10030 the opposite of the desired result. UNORDERED_CODE is used
10031 for modes that can hold NaNs and ORDERED_CODE is used for
10032 the rest. */
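/* Hedged illustration (the tree-code pairing named here is an
   assumption about how callers invoke this helper):

     __builtin_isgreater (x, y)

   is folded as TRUTH_NOT_EXPR <UNLE_EXPR <x, y>>, i.e. !(x unordered
   or less-or-equal y); when the mode cannot hold NaNs this is just
   !(x <= y).  */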
10033
10034 static tree
10035 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10036 enum tree_code unordered_code,
10037 enum tree_code ordered_code)
10038 {
10039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10040 enum tree_code code;
10041 tree type0, type1;
10042 enum tree_code code0, code1;
10043 tree cmp_type = NULL_TREE;
10044
10045 type0 = TREE_TYPE (arg0);
10046 type1 = TREE_TYPE (arg1);
10047
10048 code0 = TREE_CODE (type0);
10049 code1 = TREE_CODE (type1);
10050
10051 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10052 /* Choose the wider of two real types. */
10053 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10054 ? type0 : type1;
10055 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10056 cmp_type = type0;
10057 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10058 cmp_type = type1;
10059
10060 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10061 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10062
10063 if (unordered_code == UNORDERED_EXPR)
10064 {
10065 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10066 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10067 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10068 }
10069
10070 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10071 : ordered_code;
10072 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10073 fold_build2_loc (loc, code, type, arg0, arg1));
10074 }
10075
10076 /* Fold a call to built-in function FNDECL with 0 arguments.
10077 IGNORE is true if the result of the function call is ignored. This
10078 function returns NULL_TREE if no simplification was possible. */
10079
10080 static tree
10081 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10082 {
10083 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10084 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10085 switch (fcode)
10086 {
10087 CASE_FLT_FN (BUILT_IN_INF):
10088 case BUILT_IN_INFD32:
10089 case BUILT_IN_INFD64:
10090 case BUILT_IN_INFD128:
10091 return fold_builtin_inf (loc, type, true);
10092
10093 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10094 return fold_builtin_inf (loc, type, false);
10095
10096 case BUILT_IN_CLASSIFY_TYPE:
10097 return fold_builtin_classify_type (NULL_TREE);
10098
10099 case BUILT_IN_UNREACHABLE:
10100 if (flag_sanitize & SANITIZE_UNREACHABLE
10101 && (current_function_decl == NULL
10102 || !lookup_attribute ("no_sanitize_undefined",
10103 DECL_ATTRIBUTES (current_function_decl))))
10104 return ubsan_instrument_unreachable (loc);
10105 break;
10106
10107 default:
10108 break;
10109 }
10110 return NULL_TREE;
10111 }
10112
10113 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10114 IGNORE is true if the result of the function call is ignored. This
10115 function returns NULL_TREE if no simplification was possible. */
10116
10117 static tree
10118 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10119 {
10120 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10121 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10122 switch (fcode)
10123 {
10124 case BUILT_IN_CONSTANT_P:
10125 {
10126 tree val = fold_builtin_constant_p (arg0);
10127
10128 /* Gimplification will pull the CALL_EXPR for the builtin out of
10129 an if condition. When not optimizing, we'll not CSE it back.
10130 To avoid regressions in the form of link errors, return false now. */
10131 if (!val && !optimize)
10132 val = integer_zero_node;
10133
10134 return val;
10135 }
10136
10137 case BUILT_IN_CLASSIFY_TYPE:
10138 return fold_builtin_classify_type (arg0);
10139
10140 case BUILT_IN_STRLEN:
10141 return fold_builtin_strlen (loc, type, arg0);
10142
10143 CASE_FLT_FN (BUILT_IN_FABS):
10144 case BUILT_IN_FABSD32:
10145 case BUILT_IN_FABSD64:
10146 case BUILT_IN_FABSD128:
10147 return fold_builtin_fabs (loc, arg0, type);
10148
10149 case BUILT_IN_ABS:
10150 case BUILT_IN_LABS:
10151 case BUILT_IN_LLABS:
10152 case BUILT_IN_IMAXABS:
10153 return fold_builtin_abs (loc, arg0, type);
10154
10155 CASE_FLT_FN (BUILT_IN_CONJ):
10156 if (validate_arg (arg0, COMPLEX_TYPE)
10157 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10158 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10159 break;
10160
10161 CASE_FLT_FN (BUILT_IN_CREAL):
10162 if (validate_arg (arg0, COMPLEX_TYPE)
10163 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10164 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10165 break;
10166
10167 CASE_FLT_FN (BUILT_IN_CIMAG):
10168 if (validate_arg (arg0, COMPLEX_TYPE)
10169 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10170 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10171 break;
10172
10173 CASE_FLT_FN (BUILT_IN_CCOS):
10174 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10175
10176 CASE_FLT_FN (BUILT_IN_CCOSH):
10177 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10178
10179 CASE_FLT_FN (BUILT_IN_CPROJ):
10180 return fold_builtin_cproj (loc, arg0, type);
10181
10182 CASE_FLT_FN (BUILT_IN_CSIN):
10183 if (validate_arg (arg0, COMPLEX_TYPE)
10184 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10185 return do_mpc_arg1 (arg0, type, mpc_sin);
10186 break;
10187
10188 CASE_FLT_FN (BUILT_IN_CSINH):
10189 if (validate_arg (arg0, COMPLEX_TYPE)
10190 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10191 return do_mpc_arg1 (arg0, type, mpc_sinh);
10192 break;
10193
10194 CASE_FLT_FN (BUILT_IN_CTAN):
10195 if (validate_arg (arg0, COMPLEX_TYPE)
10196 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10197 return do_mpc_arg1 (arg0, type, mpc_tan);
10198 break;
10199
10200 CASE_FLT_FN (BUILT_IN_CTANH):
10201 if (validate_arg (arg0, COMPLEX_TYPE)
10202 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10203 return do_mpc_arg1 (arg0, type, mpc_tanh);
10204 break;
10205
10206 CASE_FLT_FN (BUILT_IN_CLOG):
10207 if (validate_arg (arg0, COMPLEX_TYPE)
10208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10209 return do_mpc_arg1 (arg0, type, mpc_log);
10210 break;
10211
10212 CASE_FLT_FN (BUILT_IN_CSQRT):
10213 if (validate_arg (arg0, COMPLEX_TYPE)
10214 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10215 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10216 break;
10217
10218 CASE_FLT_FN (BUILT_IN_CASIN):
10219 if (validate_arg (arg0, COMPLEX_TYPE)
10220 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10221 return do_mpc_arg1 (arg0, type, mpc_asin);
10222 break;
10223
10224 CASE_FLT_FN (BUILT_IN_CACOS):
10225 if (validate_arg (arg0, COMPLEX_TYPE)
10226 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10227 return do_mpc_arg1 (arg0, type, mpc_acos);
10228 break;
10229
10230 CASE_FLT_FN (BUILT_IN_CATAN):
10231 if (validate_arg (arg0, COMPLEX_TYPE)
10232 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10233 return do_mpc_arg1 (arg0, type, mpc_atan);
10234 break;
10235
10236 CASE_FLT_FN (BUILT_IN_CASINH):
10237 if (validate_arg (arg0, COMPLEX_TYPE)
10238 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10239 return do_mpc_arg1 (arg0, type, mpc_asinh);
10240 break;
10241
10242 CASE_FLT_FN (BUILT_IN_CACOSH):
10243 if (validate_arg (arg0, COMPLEX_TYPE)
10244 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10245 return do_mpc_arg1 (arg0, type, mpc_acosh);
10246 break;
10247
10248 CASE_FLT_FN (BUILT_IN_CATANH):
10249 if (validate_arg (arg0, COMPLEX_TYPE)
10250 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10251 return do_mpc_arg1 (arg0, type, mpc_atanh);
10252 break;
10253
10254 CASE_FLT_FN (BUILT_IN_CABS):
10255 return fold_builtin_cabs (loc, arg0, type, fndecl);
10256
10257 CASE_FLT_FN (BUILT_IN_CARG):
10258 return fold_builtin_carg (loc, arg0, type);
10259
10260 CASE_FLT_FN (BUILT_IN_SQRT):
10261 return fold_builtin_sqrt (loc, arg0, type);
10262
10263 CASE_FLT_FN (BUILT_IN_CBRT):
10264 return fold_builtin_cbrt (loc, arg0, type);
10265
10266 CASE_FLT_FN (BUILT_IN_ASIN):
10267 if (validate_arg (arg0, REAL_TYPE))
10268 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10269 &dconstm1, &dconst1, true);
10270 break;
10271
10272 CASE_FLT_FN (BUILT_IN_ACOS):
10273 if (validate_arg (arg0, REAL_TYPE))
10274 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10275 &dconstm1, &dconst1, true);
10276 break;
10277
10278 CASE_FLT_FN (BUILT_IN_ATAN):
10279 if (validate_arg (arg0, REAL_TYPE))
10280 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10281 break;
10282
10283 CASE_FLT_FN (BUILT_IN_ASINH):
10284 if (validate_arg (arg0, REAL_TYPE))
10285 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10286 break;
10287
10288 CASE_FLT_FN (BUILT_IN_ACOSH):
10289 if (validate_arg (arg0, REAL_TYPE))
10290 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10291 &dconst1, NULL, true);
10292 break;
10293
10294 CASE_FLT_FN (BUILT_IN_ATANH):
10295 if (validate_arg (arg0, REAL_TYPE))
10296 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10297 &dconstm1, &dconst1, false);
10298 break;
10299
10300 CASE_FLT_FN (BUILT_IN_SIN):
10301 if (validate_arg (arg0, REAL_TYPE))
10302 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10303 break;
10304
10305 CASE_FLT_FN (BUILT_IN_COS):
10306 return fold_builtin_cos (loc, arg0, type, fndecl);
10307
10308 CASE_FLT_FN (BUILT_IN_TAN):
10309 return fold_builtin_tan (arg0, type);
10310
10311 CASE_FLT_FN (BUILT_IN_CEXP):
10312 return fold_builtin_cexp (loc, arg0, type);
10313
10314 CASE_FLT_FN (BUILT_IN_CEXPI):
10315 if (validate_arg (arg0, REAL_TYPE))
10316 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_SINH):
10320 if (validate_arg (arg0, REAL_TYPE))
10321 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10322 break;
10323
10324 CASE_FLT_FN (BUILT_IN_COSH):
10325 return fold_builtin_cosh (loc, arg0, type, fndecl);
10326
10327 CASE_FLT_FN (BUILT_IN_TANH):
10328 if (validate_arg (arg0, REAL_TYPE))
10329 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10330 break;
10331
10332 CASE_FLT_FN (BUILT_IN_ERF):
10333 if (validate_arg (arg0, REAL_TYPE))
10334 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10335 break;
10336
10337 CASE_FLT_FN (BUILT_IN_ERFC):
10338 if (validate_arg (arg0, REAL_TYPE))
10339 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10340 break;
10341
10342 CASE_FLT_FN (BUILT_IN_TGAMMA):
10343 if (validate_arg (arg0, REAL_TYPE))
10344 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10345 break;
10346
10347 CASE_FLT_FN (BUILT_IN_EXP):
10348 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10349
10350 CASE_FLT_FN (BUILT_IN_EXP2):
10351 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10352
10353 CASE_FLT_FN (BUILT_IN_EXP10):
10354 CASE_FLT_FN (BUILT_IN_POW10):
10355 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10356
10357 CASE_FLT_FN (BUILT_IN_EXPM1):
10358 if (validate_arg (arg0, REAL_TYPE))
10359 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_LOG):
10363 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10364
10365 CASE_FLT_FN (BUILT_IN_LOG2):
10366 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10367
10368 CASE_FLT_FN (BUILT_IN_LOG10):
10369 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10370
10371 CASE_FLT_FN (BUILT_IN_LOG1P):
10372 if (validate_arg (arg0, REAL_TYPE))
10373 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10374 &dconstm1, NULL, false);
10375 break;
10376
10377 CASE_FLT_FN (BUILT_IN_J0):
10378 if (validate_arg (arg0, REAL_TYPE))
10379 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10380 NULL, NULL, 0);
10381 break;
10382
10383 CASE_FLT_FN (BUILT_IN_J1):
10384 if (validate_arg (arg0, REAL_TYPE))
10385 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10386 NULL, NULL, 0);
10387 break;
10388
10389 CASE_FLT_FN (BUILT_IN_Y0):
10390 if (validate_arg (arg0, REAL_TYPE))
10391 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10392 &dconst0, NULL, false);
10393 break;
10394
10395 CASE_FLT_FN (BUILT_IN_Y1):
10396 if (validate_arg (arg0, REAL_TYPE))
10397 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10398 &dconst0, NULL, false);
10399 break;
10400
10401 CASE_FLT_FN (BUILT_IN_NAN):
10402 case BUILT_IN_NAND32:
10403 case BUILT_IN_NAND64:
10404 case BUILT_IN_NAND128:
10405 return fold_builtin_nan (arg0, type, true);
10406
10407 CASE_FLT_FN (BUILT_IN_NANS):
10408 return fold_builtin_nan (arg0, type, false);
10409
10410 CASE_FLT_FN (BUILT_IN_FLOOR):
10411 return fold_builtin_floor (loc, fndecl, arg0);
10412
10413 CASE_FLT_FN (BUILT_IN_CEIL):
10414 return fold_builtin_ceil (loc, fndecl, arg0);
10415
10416 CASE_FLT_FN (BUILT_IN_TRUNC):
10417 return fold_builtin_trunc (loc, fndecl, arg0);
10418
10419 CASE_FLT_FN (BUILT_IN_ROUND):
10420 return fold_builtin_round (loc, fndecl, arg0);
10421
10422 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10423 CASE_FLT_FN (BUILT_IN_RINT):
10424 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10425
10426 CASE_FLT_FN (BUILT_IN_ICEIL):
10427 CASE_FLT_FN (BUILT_IN_LCEIL):
10428 CASE_FLT_FN (BUILT_IN_LLCEIL):
10429 CASE_FLT_FN (BUILT_IN_LFLOOR):
10430 CASE_FLT_FN (BUILT_IN_IFLOOR):
10431 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10432 CASE_FLT_FN (BUILT_IN_IROUND):
10433 CASE_FLT_FN (BUILT_IN_LROUND):
10434 CASE_FLT_FN (BUILT_IN_LLROUND):
10435 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10436
10437 CASE_FLT_FN (BUILT_IN_IRINT):
10438 CASE_FLT_FN (BUILT_IN_LRINT):
10439 CASE_FLT_FN (BUILT_IN_LLRINT):
10440 return fold_fixed_mathfn (loc, fndecl, arg0);
10441
10442 case BUILT_IN_BSWAP16:
10443 case BUILT_IN_BSWAP32:
10444 case BUILT_IN_BSWAP64:
10445 return fold_builtin_bswap (fndecl, arg0);
10446
10447 CASE_INT_FN (BUILT_IN_FFS):
10448 CASE_INT_FN (BUILT_IN_CLZ):
10449 CASE_INT_FN (BUILT_IN_CTZ):
10450 CASE_INT_FN (BUILT_IN_CLRSB):
10451 CASE_INT_FN (BUILT_IN_POPCOUNT):
10452 CASE_INT_FN (BUILT_IN_PARITY):
10453 return fold_builtin_bitop (fndecl, arg0);
10454
10455 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10456 return fold_builtin_signbit (loc, arg0, type);
10457
10458 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10459 return fold_builtin_significand (loc, arg0, type);
10460
10461 CASE_FLT_FN (BUILT_IN_ILOGB):
10462 CASE_FLT_FN (BUILT_IN_LOGB):
10463 return fold_builtin_logb (loc, arg0, type);
10464
10465 case BUILT_IN_ISASCII:
10466 return fold_builtin_isascii (loc, arg0);
10467
10468 case BUILT_IN_TOASCII:
10469 return fold_builtin_toascii (loc, arg0);
10470
10471 case BUILT_IN_ISDIGIT:
10472 return fold_builtin_isdigit (loc, arg0);
10473
10474 CASE_FLT_FN (BUILT_IN_FINITE):
10475 case BUILT_IN_FINITED32:
10476 case BUILT_IN_FINITED64:
10477 case BUILT_IN_FINITED128:
10478 case BUILT_IN_ISFINITE:
10479 {
10480 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10481 if (ret)
10482 return ret;
10483 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10484 }
10485
10486 CASE_FLT_FN (BUILT_IN_ISINF):
10487 case BUILT_IN_ISINFD32:
10488 case BUILT_IN_ISINFD64:
10489 case BUILT_IN_ISINFD128:
10490 {
10491 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10492 if (ret)
10493 return ret;
10494 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10495 }
10496
10497 case BUILT_IN_ISNORMAL:
10498 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10499
10500 case BUILT_IN_ISINF_SIGN:
10501 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10502
10503 CASE_FLT_FN (BUILT_IN_ISNAN):
10504 case BUILT_IN_ISNAND32:
10505 case BUILT_IN_ISNAND64:
10506 case BUILT_IN_ISNAND128:
10507 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10508
10509 case BUILT_IN_PRINTF:
10510 case BUILT_IN_PRINTF_UNLOCKED:
10511 case BUILT_IN_VPRINTF:
10512 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10513
10514 case BUILT_IN_FREE:
10515 if (integer_zerop (arg0))
10516 return build_empty_stmt (loc);
10517 break;
10518
10519 default:
10520 break;
10521 }
10522
10523 return NULL_TREE;
10524
10525 }
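
/* For a flavor of the unary folds dispatched above: a call such as
   __builtin_erf (1.0) with a constant real argument is evaluated at
   compile time through MPFR (do_mpfr_arg1), and __builtin_free (0)
   simply becomes an empty statement. */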
10526
10527 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10528 IGNORE is true if the result of the function call is ignored. This
10529 function returns NULL_TREE if no simplification was possible. */
10530
10531 static tree
10532 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10533 {
10534 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10535 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10536
10537 switch (fcode)
10538 {
10539 CASE_FLT_FN (BUILT_IN_JN):
10540 if (validate_arg (arg0, INTEGER_TYPE)
10541 && validate_arg (arg1, REAL_TYPE))
10542 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10543 break;
10544
10545 CASE_FLT_FN (BUILT_IN_YN):
10546 if (validate_arg (arg0, INTEGER_TYPE)
10547 && validate_arg (arg1, REAL_TYPE))
10548 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10549 &dconst0, false);
10550 break;
10551
10552 CASE_FLT_FN (BUILT_IN_DREM):
10553 CASE_FLT_FN (BUILT_IN_REMAINDER):
10554 if (validate_arg (arg0, REAL_TYPE)
10555 && validate_arg (arg1, REAL_TYPE))
10556 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10557 break;
10558
10559 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10560 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10561 if (validate_arg (arg0, REAL_TYPE)
10562 && validate_arg (arg1, POINTER_TYPE))
10563 return do_mpfr_lgamma_r (arg0, arg1, type);
10564 break;
10565
10566 CASE_FLT_FN (BUILT_IN_ATAN2):
10567 if (validate_arg (arg0, REAL_TYPE)
10568 && validate_arg (arg1, REAL_TYPE))
10569 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10570 break;
10571
10572 CASE_FLT_FN (BUILT_IN_FDIM):
10573 if (validate_arg (arg0, REAL_TYPE)
10574 && validate_arg (arg1, REAL_TYPE))
10575 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10576 break;
10577
10578 CASE_FLT_FN (BUILT_IN_HYPOT):
10579 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10580
10581 CASE_FLT_FN (BUILT_IN_CPOW):
10582 if (validate_arg (arg0, COMPLEX_TYPE)
10583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10584 && validate_arg (arg1, COMPLEX_TYPE)
10585 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10586 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10587 break;
10588
10589 CASE_FLT_FN (BUILT_IN_LDEXP):
10590 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10591 CASE_FLT_FN (BUILT_IN_SCALBN):
10592 CASE_FLT_FN (BUILT_IN_SCALBLN):
10593 return fold_builtin_load_exponent (loc, arg0, arg1,
10594 type, /*ldexp=*/false);
10595
10596 CASE_FLT_FN (BUILT_IN_FREXP):
10597 return fold_builtin_frexp (loc, arg0, arg1, type);
10598
10599 CASE_FLT_FN (BUILT_IN_MODF):
10600 return fold_builtin_modf (loc, arg0, arg1, type);
10601
10602 case BUILT_IN_BZERO:
10603 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10604
10605 case BUILT_IN_FPUTS:
10606 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10607
10608 case BUILT_IN_FPUTS_UNLOCKED:
10609 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10610
10611 case BUILT_IN_STRSTR:
10612 return fold_builtin_strstr (loc, arg0, arg1, type);
10613
10614 case BUILT_IN_STRCAT:
10615 return fold_builtin_strcat (loc, arg0, arg1);
10616
10617 case BUILT_IN_STRSPN:
10618 return fold_builtin_strspn (loc, arg0, arg1);
10619
10620 case BUILT_IN_STRCSPN:
10621 return fold_builtin_strcspn (loc, arg0, arg1);
10622
10623 case BUILT_IN_STRCHR:
10624 case BUILT_IN_INDEX:
10625 return fold_builtin_strchr (loc, arg0, arg1, type);
10626
10627 case BUILT_IN_STRRCHR:
10628 case BUILT_IN_RINDEX:
10629 return fold_builtin_strrchr (loc, arg0, arg1, type);
10630
10631 case BUILT_IN_STRCPY:
10632 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10633
10634 case BUILT_IN_STPCPY:
10635 if (ignore)
10636 {
10637 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10638 if (!fn)
10639 break;
10640
10641 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10642 }
10643 else
10644 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10645 break;
10646
10647 case BUILT_IN_STRCMP:
10648 return fold_builtin_strcmp (loc, arg0, arg1);
10649
10650 case BUILT_IN_STRPBRK:
10651 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10652
10653 case BUILT_IN_EXPECT:
10654 return fold_builtin_expect (loc, arg0, arg1);
10655
10656 CASE_FLT_FN (BUILT_IN_POW):
10657 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10658
10659 CASE_FLT_FN (BUILT_IN_POWI):
10660 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10661
10662 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10663 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10664
10665 CASE_FLT_FN (BUILT_IN_FMIN):
10666 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10667
10668 CASE_FLT_FN (BUILT_IN_FMAX):
10669 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10670
10671 case BUILT_IN_ISGREATER:
10672 return fold_builtin_unordered_cmp (loc, fndecl,
10673 arg0, arg1, UNLE_EXPR, LE_EXPR);
10674 case BUILT_IN_ISGREATEREQUAL:
10675 return fold_builtin_unordered_cmp (loc, fndecl,
10676 arg0, arg1, UNLT_EXPR, LT_EXPR);
10677 case BUILT_IN_ISLESS:
10678 return fold_builtin_unordered_cmp (loc, fndecl,
10679 arg0, arg1, UNGE_EXPR, GE_EXPR);
10680 case BUILT_IN_ISLESSEQUAL:
10681 return fold_builtin_unordered_cmp (loc, fndecl,
10682 arg0, arg1, UNGT_EXPR, GT_EXPR);
10683 case BUILT_IN_ISLESSGREATER:
10684 return fold_builtin_unordered_cmp (loc, fndecl,
10685 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10686 case BUILT_IN_ISUNORDERED:
10687 return fold_builtin_unordered_cmp (loc, fndecl,
10688 arg0, arg1, UNORDERED_EXPR,
10689 NOP_EXPR);
10690
10691 /* We do the folding for va_start in the expander. */
10692 case BUILT_IN_VA_START:
10693 break;
10694
10695 case BUILT_IN_SPRINTF:
10696 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10697
10698 case BUILT_IN_OBJECT_SIZE:
10699 return fold_builtin_object_size (arg0, arg1);
10700
10701 case BUILT_IN_PRINTF:
10702 case BUILT_IN_PRINTF_UNLOCKED:
10703 case BUILT_IN_VPRINTF:
10704 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10705
10706 case BUILT_IN_PRINTF_CHK:
10707 case BUILT_IN_VPRINTF_CHK:
10708 if (!validate_arg (arg0, INTEGER_TYPE)
10709 || TREE_SIDE_EFFECTS (arg0))
10710 return NULL_TREE;
10711 else
10712 return fold_builtin_printf (loc, fndecl,
10713 arg1, NULL_TREE, ignore, fcode);
10714 break;
10715
10716 case BUILT_IN_FPRINTF:
10717 case BUILT_IN_FPRINTF_UNLOCKED:
10718 case BUILT_IN_VFPRINTF:
10719 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10720 ignore, fcode);
10721
10722 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10723 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10724
10725 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10726 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10727
10728 default:
10729 break;
10730 }
10731 return NULL_TREE;
10732 }
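
/* As an example of the unordered comparison folds above,
   isgreater (x, y) is rewritten as the negation of an UNLE_EXPR
   comparison (or of a plain LE_EXPR when NaNs need not be honored),
   so no library call is ever emitted for these classification
   macros. */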
10733
10734 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10735 and ARG2. IGNORE is true if the result of the function call is ignored.
10736 This function returns NULL_TREE if no simplification was possible. */
10737
10738 static tree
10739 fold_builtin_3 (location_t loc, tree fndecl,
10740 tree arg0, tree arg1, tree arg2, bool ignore)
10741 {
10742 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10743 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10744 switch (fcode)
10745 {
10746
10747 CASE_FLT_FN (BUILT_IN_SINCOS):
10748 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10749
10750 CASE_FLT_FN (BUILT_IN_FMA):
10751 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10753
10754 CASE_FLT_FN (BUILT_IN_REMQUO):
10755 if (validate_arg (arg0, REAL_TYPE)
10756 && validate_arg (arg1, REAL_TYPE)
10757 && validate_arg (arg2, POINTER_TYPE))
10758 return do_mpfr_remquo (arg0, arg1, arg2);
10759 break;
10760
10761 case BUILT_IN_MEMSET:
10762 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10763
10764 case BUILT_IN_BCOPY:
10765 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10766 void_type_node, true, /*endp=*/3);
10767
10768 case BUILT_IN_MEMCPY:
10769 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10770 type, ignore, /*endp=*/0);
10771
10772 case BUILT_IN_MEMPCPY:
10773 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10774 type, ignore, /*endp=*/1);
10775
10776 case BUILT_IN_MEMMOVE:
10777 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10778 type, ignore, /*endp=*/3);
10779
10780 case BUILT_IN_STRNCAT:
10781 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10782
10783 case BUILT_IN_STRNCPY:
10784 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10785
10786 case BUILT_IN_STRNCMP:
10787 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10788
10789 case BUILT_IN_MEMCHR:
10790 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10791
10792 case BUILT_IN_BCMP:
10793 case BUILT_IN_MEMCMP:
10794 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10795
10796 case BUILT_IN_SPRINTF:
10797 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10798
10799 case BUILT_IN_SNPRINTF:
10800 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10801
10802 case BUILT_IN_STRCPY_CHK:
10803 case BUILT_IN_STPCPY_CHK:
10804 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10805 ignore, fcode);
10806
10807 case BUILT_IN_STRCAT_CHK:
10808 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10809
10810 case BUILT_IN_PRINTF_CHK:
10811 case BUILT_IN_VPRINTF_CHK:
10812 if (!validate_arg (arg0, INTEGER_TYPE)
10813 || TREE_SIDE_EFFECTS (arg0))
10814 return NULL_TREE;
10815 else
10816 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10817 break;
10818
10819 case BUILT_IN_FPRINTF:
10820 case BUILT_IN_FPRINTF_UNLOCKED:
10821 case BUILT_IN_VFPRINTF:
10822 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10823 ignore, fcode);
10824
10825 case BUILT_IN_FPRINTF_CHK:
10826 case BUILT_IN_VFPRINTF_CHK:
10827 if (!validate_arg (arg1, INTEGER_TYPE)
10828 || TREE_SIDE_EFFECTS (arg1))
10829 return NULL_TREE;
10830 else
10831 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10832 ignore, fcode);
10833
10834 default:
10835 break;
10836 }
10837 return NULL_TREE;
10838 }
10839
10840 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10841 ARG2, and ARG3. IGNORE is true if the result of the function call is
10842 ignored. This function returns NULL_TREE if no simplification was
10843 possible. */
10844
10845 static tree
10846 fold_builtin_4 (location_t loc, tree fndecl,
10847 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10848 {
10849 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10850
10851 switch (fcode)
10852 {
10853 case BUILT_IN_MEMCPY_CHK:
10854 case BUILT_IN_MEMPCPY_CHK:
10855 case BUILT_IN_MEMMOVE_CHK:
10856 case BUILT_IN_MEMSET_CHK:
10857 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10858 NULL_TREE, ignore,
10859 DECL_FUNCTION_CODE (fndecl));
10860
10861 case BUILT_IN_STRNCPY_CHK:
10862 case BUILT_IN_STPNCPY_CHK:
10863 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10864 ignore, fcode);
10865
10866 case BUILT_IN_STRNCAT_CHK:
10867 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10868
10869 case BUILT_IN_SNPRINTF:
10870 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10871
10872 case BUILT_IN_FPRINTF_CHK:
10873 case BUILT_IN_VFPRINTF_CHK:
10874 if (!validate_arg (arg1, INTEGER_TYPE)
10875 || TREE_SIDE_EFFECTS (arg1))
10876 return NULL_TREE;
10877 else
10878 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10879 ignore, fcode);
10880 break;
10881
10882 default:
10883 break;
10884 }
10885 return NULL_TREE;
10886 }
10887
10888 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10889 arguments, where NARGS <= 4. IGNORE is true if the result of the
10890 function call is ignored. This function returns NULL_TREE if no
10891 simplification was possible. Note that this only folds builtins with
10892 fixed argument patterns. Foldings that do varargs-to-varargs
10893 transformations, or that match calls with more than 4 arguments,
10894 need to be handled with fold_builtin_varargs instead. */
10895
10896 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10897
10898 static tree
10899 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10900 {
10901 tree ret = NULL_TREE;
10902
10903 switch (nargs)
10904 {
10905 case 0:
10906 ret = fold_builtin_0 (loc, fndecl, ignore);
10907 break;
10908 case 1:
10909 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10910 break;
10911 case 2:
10912 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10913 break;
10914 case 3:
10915 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10916 break;
10917 case 4:
10918 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10919 ignore);
10920 break;
10921 default:
10922 break;
10923 }
10924 if (ret)
10925 {
10926 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10927 SET_EXPR_LOCATION (ret, loc);
10928 TREE_NO_WARNING (ret) = 1;
10929 return ret;
10930 }
10931 return NULL_TREE;
10932 }
10933
10934 /* Builtins with folding operations that operate on "..." arguments
10935 need special handling; we need to store the arguments in a convenient
10936 data structure before attempting any folding. Fortunately there are
10937 only a few builtins that fall into this category. FNDECL is the
10938 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10939 result of the function call is ignored. */
10940
10941 static tree
10942 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10943 bool ignore ATTRIBUTE_UNUSED)
10944 {
10945 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10946 tree ret = NULL_TREE;
10947
10948 switch (fcode)
10949 {
10950 case BUILT_IN_SPRINTF_CHK:
10951 case BUILT_IN_VSPRINTF_CHK:
10952 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10953 break;
10954
10955 case BUILT_IN_SNPRINTF_CHK:
10956 case BUILT_IN_VSNPRINTF_CHK:
10957 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10958 break;
10959
10960 case BUILT_IN_FPCLASSIFY:
10961 ret = fold_builtin_fpclassify (loc, exp);
10962 break;
10963
10964 default:
10965 break;
10966 }
10967 if (ret)
10968 {
10969 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10970 SET_EXPR_LOCATION (ret, loc);
10971 TREE_NO_WARNING (ret) = 1;
10972 return ret;
10973 }
10974 return NULL_TREE;
10975 }
10976
10977 /* Return true if FNDECL shouldn't be folded right now.
10978 If a built-in function has an inline attribute always_inline
10979 wrapper, defer folding it after always_inline functions have
10980 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10981 might not be performed. */
10982
10983 bool
10984 avoid_folding_inline_builtin (tree fndecl)
10985 {
10986 return (DECL_DECLARED_INLINE_P (fndecl)
10987 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10988 && cfun
10989 && !cfun->always_inline_functions_inlined
10990 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10991 }
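
/* The motivating case is glibc with -D_FORTIFY_SOURCE, where e.g.
   memcpy is redeclared as an extern always_inline wrapper that calls
   __builtin___memcpy_chk; folding the builtin before that wrapper is
   inlined would silently drop the object-size check. */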
10992
10993 /* A wrapper function for builtin folding that prevents warnings for
10994 "statement without effect" and the like, caused by removing the
10995 call node earlier than the warning is generated. */
10996
10997 tree
10998 fold_call_expr (location_t loc, tree exp, bool ignore)
10999 {
11000 tree ret = NULL_TREE;
11001 tree fndecl = get_callee_fndecl (exp);
11002 if (fndecl
11003 && TREE_CODE (fndecl) == FUNCTION_DECL
11004 && DECL_BUILT_IN (fndecl)
11005 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11006 yet. Defer folding until we see all the arguments
11007 (after inlining). */
11008 && !CALL_EXPR_VA_ARG_PACK (exp))
11009 {
11010 int nargs = call_expr_nargs (exp);
11011
11012 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11013 instead the last argument is __builtin_va_arg_pack (). Defer folding
11014 even in that case, until arguments are finalized. */
11015 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11016 {
11017 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11018 if (fndecl2
11019 && TREE_CODE (fndecl2) == FUNCTION_DECL
11020 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11021 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11022 return NULL_TREE;
11023 }
11024
11025 if (avoid_folding_inline_builtin (fndecl))
11026 return NULL_TREE;
11027
11028 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11029 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11030 CALL_EXPR_ARGP (exp), ignore);
11031 else
11032 {
11033 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11034 {
11035 tree *args = CALL_EXPR_ARGP (exp);
11036 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11037 }
11038 if (!ret)
11039 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11040 if (ret)
11041 return ret;
11042 }
11043 }
11044 return NULL_TREE;
11045 }
11046
11047 /* Conveniently construct a function call expression. FNDECL names the
11048 function to be called and N arguments are passed in the array
11049 ARGARRAY. */
11050
11051 tree
11052 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11053 {
11054 tree fntype = TREE_TYPE (fndecl);
11055 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11056
11057 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11058 }
11059
11060 /* Conveniently construct a function call expression. FNDECL names the
11061 function to be called and the arguments are passed in the vector
11062 VEC. */
11063
11064 tree
11065 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11066 {
11067 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11068 vec_safe_address (vec));
11069 }
11070
11071
11072 /* Conveniently construct a function call expression. FNDECL names the
11073 function to be called, N is the number of arguments, and the "..."
11074 parameters are the argument expressions. */
11075
11076 tree
11077 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11078 {
11079 va_list ap;
11080 tree *argarray = XALLOCAVEC (tree, n);
11081 int i;
11082
11083 va_start (ap, n);
11084 for (i = 0; i < n; i++)
11085 argarray[i] = va_arg (ap, tree);
11086 va_end (ap);
11087 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11088 }
11089
11090 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11091 varargs macros aren't supported by all bootstrap compilers. */
11092
11093 tree
11094 build_call_expr (tree fndecl, int n, ...)
11095 {
11096 va_list ap;
11097 tree *argarray = XALLOCAVEC (tree, n);
11098 int i;
11099
11100 va_start (ap, n);
11101 for (i = 0; i < n; i++)
11102 argarray[i] = va_arg (ap, tree);
11103 va_end (ap);
11104 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11105 }
11106
11107 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11108 N arguments are passed in the array ARGARRAY. */
11109
11110 tree
11111 fold_builtin_call_array (location_t loc, tree type,
11112 tree fn,
11113 int n,
11114 tree *argarray)
11115 {
11116 tree ret = NULL_TREE;
11117 tree exp;
11118
11119 if (TREE_CODE (fn) == ADDR_EXPR)
11120 {
11121 tree fndecl = TREE_OPERAND (fn, 0);
11122 if (TREE_CODE (fndecl) == FUNCTION_DECL
11123 && DECL_BUILT_IN (fndecl))
11124 {
11125 /* If the last argument is __builtin_va_arg_pack (), the arguments to this
11126 function are not finalized yet. Defer folding until they are. */
11127 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11128 {
11129 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11130 if (fndecl2
11131 && TREE_CODE (fndecl2) == FUNCTION_DECL
11132 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11133 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11134 return build_call_array_loc (loc, type, fn, n, argarray);
11135 }
11136 if (avoid_folding_inline_builtin (fndecl))
11137 return build_call_array_loc (loc, type, fn, n, argarray);
11138 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11139 {
11140 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11141 if (ret)
11142 return ret;
11143
11144 return build_call_array_loc (loc, type, fn, n, argarray);
11145 }
11146 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11147 {
11148 /* First try the transformations that don't require consing up
11149 an exp. */
11150 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11151 if (ret)
11152 return ret;
11153 }
11154
11155 /* If we got this far, we need to build an exp. */
11156 exp = build_call_array_loc (loc, type, fn, n, argarray);
11157 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11158 return ret ? ret : exp;
11159 }
11160 }
11161
11162 return build_call_array_loc (loc, type, fn, n, argarray);
11163 }
11164
11165 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11166 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11167 of arguments in ARGS to be omitted. OLDNARGS is the number of
11168 elements in ARGS. */
11169
11170 static tree
11171 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11172 int skip, tree fndecl, int n, va_list newargs)
11173 {
11174 int nargs = oldnargs - skip + n;
11175 tree *buffer;
11176
11177 if (n > 0)
11178 {
11179 int i, j;
11180
11181 buffer = XALLOCAVEC (tree, nargs);
11182 for (i = 0; i < n; i++)
11183 buffer[i] = va_arg (newargs, tree);
11184 for (j = skip; j < oldnargs; j++, i++)
11185 buffer[i] = args[j];
11186 }
11187 else
11188 buffer = args + skip;
11189
11190 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11191 }
11192
11193 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11194 list ARGS along with N new arguments specified as the "..."
11195 parameters. SKIP is the number of arguments in ARGS to be omitted.
11196 OLDNARGS is the number of elements in ARGS. */
11197
11198 static tree
11199 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11200 int skip, tree fndecl, int n, ...)
11201 {
11202 va_list ap;
11203 tree t;
11204
11205 va_start (ap, n);
11206 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11207 va_end (ap);
11208
11209 return t;
11210 }
11211
11212 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11213 along with N new arguments specified as the "..." parameters. SKIP
11214 is the number of arguments in EXP to be omitted. This function is used
11215 to do varargs-to-varargs transformations. */
11216
11217 static tree
11218 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11219 {
11220 va_list ap;
11221 tree t;
11222
11223 va_start (ap, n);
11224 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11225 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11226 va_end (ap);
11227
11228 return t;
11229 }
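
/* A sketch of how the rewriters above are used (sprintf_decl below is
   a stand-in for builtin_decl_explicit (BUILT_IN_SPRINTF)): to turn
   __sprintf_chk (dest, flag, size, fmt, args...) into
   sprintf (dest, fmt, args...), skip the first three arguments and
   re-prepend DEST:

     rewrite_call_expr (loc, exp, 3, sprintf_decl, 1, dest);  */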
11230
11231 /* Validate a single argument ARG against a tree code CODE representing
11232 a type. */
11233
11234 static bool
11235 validate_arg (const_tree arg, enum tree_code code)
11236 {
11237 if (!arg)
11238 return false;
11239 else if (code == POINTER_TYPE)
11240 return POINTER_TYPE_P (TREE_TYPE (arg));
11241 else if (code == INTEGER_TYPE)
11242 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11243 return code == TREE_CODE (TREE_TYPE (arg));
11244 }
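
/* Note the two deliberately loose cases above: any pointer type
   matches POINTER_TYPE and any integral type (bool, enum, char, ...)
   matches INTEGER_TYPE; every other code must equal the argument's
   type code exactly. */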
11245
11246 /* This function validates the types of a function call argument list
11247 against a specified list of tree_codes. If the last specifier is a 0,
11248 that represents an ellipsis; otherwise the last specifier must be a
11249 VOID_TYPE.
11250
11251 This is the GIMPLE version of validate_arglist. Eventually we want to
11252 completely convert builtins.c to work from GIMPLEs and the tree based
11253 validate_arglist will then be removed. */
11254
11255 bool
11256 validate_gimple_arglist (const_gimple call, ...)
11257 {
11258 enum tree_code code;
11259 bool res = false;
11260 va_list ap;
11261 const_tree arg;
11262 size_t i;
11263
11264 va_start (ap, call);
11265 i = 0;
11266
11267 do
11268 {
11269 code = (enum tree_code) va_arg (ap, int);
11270 switch (code)
11271 {
11272 case 0:
11273 /* This signifies an ellipsis; any further arguments are all ok. */
11274 res = true;
11275 goto end;
11276 case VOID_TYPE:
11277 /* This signifies an endlink; if no arguments remain, return
11278 true, otherwise return false. */
11279 res = (i == gimple_call_num_args (call));
11280 goto end;
11281 default:
11282 /* If no parameters remain or the parameter's code does not
11283 match the specified code, return false. Otherwise continue
11284 checking any remaining arguments. */
11285 arg = gimple_call_arg (call, i++);
11286 if (!validate_arg (arg, code))
11287 goto end;
11288 break;
11289 }
11290 }
11291 while (1);
11292
11293 /* We need gotos here so that va_end is called exactly once, at a
11294 single exit point. */
11295 end: ;
11296 va_end (ap);
11297
11298 return res;
11299 }
11300
11301 /* This function validates the types of a function call argument list
11302 against a specified list of tree_codes. If the last specifier is a 0,
11303 that represents an ellipsis; otherwise the last specifier must be a
11304 VOID_TYPE. */
11305
11306 bool
11307 validate_arglist (const_tree callexpr, ...)
11308 {
11309 enum tree_code code;
11310 bool res = false;
11311 va_list ap;
11312 const_call_expr_arg_iterator iter;
11313 const_tree arg;
11314
11315 va_start (ap, callexpr);
11316 init_const_call_expr_arg_iterator (callexpr, &iter);
11317
11318 do
11319 {
11320 code = (enum tree_code) va_arg (ap, int);
11321 switch (code)
11322 {
11323 case 0:
11324 /* This signifies an ellipsis; any further arguments are all ok. */
11325 res = true;
11326 goto end;
11327 case VOID_TYPE:
11328 /* This signifies an endlink; if no arguments remain, return
11329 true, otherwise return false. */
11330 res = !more_const_call_expr_args_p (&iter);
11331 goto end;
11332 default:
11333 /* If no parameters remain or the parameter's code does not
11334 match the specified code, return false. Otherwise continue
11335 checking any remaining arguments. */
11336 arg = next_const_call_expr_arg (&iter);
11337 if (!validate_arg (arg, code))
11338 goto end;
11339 break;
11340 }
11341 }
11342 while (1);
11343
11344 /* We need gotos here so that va_end is called exactly once, at a
11345 single exit point. */
11346 end: ;
11347 va_end (ap);
11348
11349 return res;
11350 }
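
/* Example usage: a strcpy-shaped call is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);

   while a printf-shaped call, whose trailing arguments may be
   anything, uses the trailing 0 specifier:

     validate_arglist (exp, POINTER_TYPE, 0);  */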
11351
11352 /* Default target-specific builtin expander that does nothing. */
11353
11354 rtx
11355 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11356 rtx target ATTRIBUTE_UNUSED,
11357 rtx subtarget ATTRIBUTE_UNUSED,
11358 enum machine_mode mode ATTRIBUTE_UNUSED,
11359 int ignore ATTRIBUTE_UNUSED)
11360 {
11361 return NULL_RTX;
11362 }
11363
11364 /* Returns true if EXP represents data that would potentially reside
11365 in a readonly section. */
11366
11367 static bool
11368 readonly_data_expr (tree exp)
11369 {
11370 STRIP_NOPS (exp);
11371
11372 if (TREE_CODE (exp) != ADDR_EXPR)
11373 return false;
11374
11375 exp = get_base_address (TREE_OPERAND (exp, 0));
11376 if (!exp)
11377 return false;
11378
11379 /* Make sure we call decl_readonly_section only for trees it
11380 can handle (since it returns true for everything it doesn't
11381 understand). */
11382 if (TREE_CODE (exp) == STRING_CST
11383 || TREE_CODE (exp) == CONSTRUCTOR
11384 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11385 return decl_readonly_section (exp, 0);
11386 else
11387 return false;
11388 }
11389
11390 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11391 to the call, and TYPE is its return type.
11392
11393 Return NULL_TREE if no simplification was possible, otherwise return the
11394 simplified form of the call as a tree.
11395
11396 The simplified form may be a constant or other expression which
11397 computes the same value, but in a more efficient manner (including
11398 calls to other builtin functions).
11399
11400 The call may contain arguments which need to be evaluated, but
11401 which are not useful to determine the result of the call. In
11402 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11403 COMPOUND_EXPR will be an argument which must be evaluated.
11404 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11405 COMPOUND_EXPR in the chain will contain the tree for the simplified
11406 form of the builtin function call. */
11407
11408 static tree
11409 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11410 {
11411 if (!validate_arg (s1, POINTER_TYPE)
11412 || !validate_arg (s2, POINTER_TYPE))
11413 return NULL_TREE;
11414 else
11415 {
11416 tree fn;
11417 const char *p1, *p2;
11418
11419 p2 = c_getstr (s2);
11420 if (p2 == NULL)
11421 return NULL_TREE;
11422
11423 p1 = c_getstr (s1);
11424 if (p1 != NULL)
11425 {
11426 const char *r = strstr (p1, p2);
11427 tree tem;
11428
11429 if (r == NULL)
11430 return build_int_cst (TREE_TYPE (s1), 0);
11431
11432 /* Return an offset into the constant string argument. */
11433 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11434 return fold_convert_loc (loc, type, tem);
11435 }
11436
11437 /* The argument is const char *, and the result is char *, so we need
11438 a type conversion here to avoid a warning. */
11439 if (p2[0] == '\0')
11440 return fold_convert_loc (loc, type, s1);
11441
11442 if (p2[1] != '\0')
11443 return NULL_TREE;
11444
11445 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11446 if (!fn)
11447 return NULL_TREE;
11448
11449 /* New argument list transforming strstr(s1, s2) to
11450 strchr(s1, s2[0]). */
11451 return build_call_expr_loc (loc, fn, 2, s1,
11452 build_int_cst (integer_type_node, p2[0]));
11453 }
11454 }
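
/* Concretely: strstr (s, "") folds to (char *) s, strstr ("hello", "ll")
   folds to an offset of 2 into the constant string, and strstr (s, "l")
   becomes strchr (s, 'l'); everything else is left to the library. */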
11455
11456 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11457 the call, and TYPE is its return type.
11458
11459 Return NULL_TREE if no simplification was possible, otherwise return the
11460 simplified form of the call as a tree.
11461
11462 The simplified form may be a constant or other expression which
11463 computes the same value, but in a more efficient manner (including
11464 calls to other builtin functions).
11465
11466 The call may contain arguments which need to be evaluated, but
11467 which are not useful to determine the result of the call. In
11468 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11469 COMPOUND_EXPR will be an argument which must be evaluated.
11470 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11471 COMPOUND_EXPR in the chain will contain the tree for the simplified
11472 form of the builtin function call. */
11473
11474 static tree
11475 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11476 {
11477 if (!validate_arg (s1, POINTER_TYPE)
11478 || !validate_arg (s2, INTEGER_TYPE))
11479 return NULL_TREE;
11480 else
11481 {
11482 const char *p1;
11483
11484 if (TREE_CODE (s2) != INTEGER_CST)
11485 return NULL_TREE;
11486
11487 p1 = c_getstr (s1);
11488 if (p1 != NULL)
11489 {
11490 char c;
11491 const char *r;
11492 tree tem;
11493
11494 if (target_char_cast (s2, &c))
11495 return NULL_TREE;
11496
11497 r = strchr (p1, c);
11498
11499 if (r == NULL)
11500 return build_int_cst (TREE_TYPE (s1), 0);
11501
11502 /* Return an offset into the constant string argument. */
11503 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11504 return fold_convert_loc (loc, type, tem);
11505 }
11506 return NULL_TREE;
11507 }
11508 }
11509
11510 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11511 the call, and TYPE is its return type.
11512
11513 Return NULL_TREE if no simplification was possible, otherwise return the
11514 simplified form of the call as a tree.
11515
11516 The simplified form may be a constant or other expression which
11517 computes the same value, but in a more efficient manner (including
11518 calls to other builtin functions).
11519
11520 The call may contain arguments which need to be evaluated, but
11521 which are not useful to determine the result of the call. In
11522 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11523 COMPOUND_EXPR will be an argument which must be evaluated.
11524 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11525 COMPOUND_EXPR in the chain will contain the tree for the simplified
11526 form of the builtin function call. */
11527
11528 static tree
11529 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11530 {
11531 if (!validate_arg (s1, POINTER_TYPE)
11532 || !validate_arg (s2, INTEGER_TYPE))
11533 return NULL_TREE;
11534 else
11535 {
11536 tree fn;
11537 const char *p1;
11538
11539 if (TREE_CODE (s2) != INTEGER_CST)
11540 return NULL_TREE;
11541
11542 p1 = c_getstr (s1);
11543 if (p1 != NULL)
11544 {
11545 char c;
11546 const char *r;
11547 tree tem;
11548
11549 if (target_char_cast (s2, &c))
11550 return NULL_TREE;
11551
11552 r = strrchr (p1, c);
11553
11554 if (r == NULL)
11555 return build_int_cst (TREE_TYPE (s1), 0);
11556
11557 /* Return an offset into the constant string argument. */
11558 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11559 return fold_convert_loc (loc, type, tem);
11560 }
11561
11562 if (! integer_zerop (s2))
11563 return NULL_TREE;
11564
11565 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11566 if (!fn)
11567 return NULL_TREE;
11568
11569 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11570 return build_call_expr_loc (loc, fn, 2, s1, s2);
11571 }
11572 }
11573
11574 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11575 to the call, and TYPE is its return type.
11576
11577 Return NULL_TREE if no simplification was possible, otherwise return the
11578 simplified form of the call as a tree.
11579
11580 The simplified form may be a constant or other expression which
11581 computes the same value, but in a more efficient manner (including
11582 calls to other builtin functions).
11583
11584 The call may contain arguments which need to be evaluated, but
11585 which are not useful to determine the result of the call. In
11586 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11587 COMPOUND_EXPR will be an argument which must be evaluated.
11588 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11589 COMPOUND_EXPR in the chain will contain the tree for the simplified
11590 form of the builtin function call. */
11591
11592 static tree
11593 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11594 {
11595 if (!validate_arg (s1, POINTER_TYPE)
11596 || !validate_arg (s2, POINTER_TYPE))
11597 return NULL_TREE;
11598 else
11599 {
11600 tree fn;
11601 const char *p1, *p2;
11602
11603 p2 = c_getstr (s2);
11604 if (p2 == NULL)
11605 return NULL_TREE;
11606
11607 p1 = c_getstr (s1);
11608 if (p1 != NULL)
11609 {
11610 const char *r = strpbrk (p1, p2);
11611 tree tem;
11612
11613 if (r == NULL)
11614 return build_int_cst (TREE_TYPE (s1), 0);
11615
11616 /* Return an offset into the constant string argument. */
11617 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11618 return fold_convert_loc (loc, type, tem);
11619 }
11620
11621 if (p2[0] == '\0')
11622 /* strpbrk(x, "") == NULL.
11623 Evaluate and ignore s1 in case it had side-effects. */
11624 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11625
11626 if (p2[1] != '\0')
11627 return NULL_TREE; /* Really call strpbrk. */
11628
11629 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11630 if (!fn)
11631 return NULL_TREE;
11632
11633 /* New argument list transforming strpbrk(s1, s2) to
11634 strchr(s1, s2[0]). */
11635 return build_call_expr_loc (loc, fn, 2, s1,
11636 build_int_cst (integer_type_node, p2[0]));
11637 }
11638 }
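
/* Concretely: with both arguments constant the offset is computed at
   compile time, strpbrk (s, "") folds to a null pointer while still
   evaluating S for side effects, and strpbrk (s, "c") becomes
   strchr (s, 'c'). */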
11639
11640 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11641 to the call.
11642
11643 Return NULL_TREE if no simplification was possible, otherwise return the
11644 simplified form of the call as a tree.
11645
11646 The simplified form may be a constant or other expression which
11647 computes the same value, but in a more efficient manner (including
11648 calls to other builtin functions).
11649
11650 The call may contain arguments which need to be evaluated, but
11651 which are not useful to determine the result of the call. In
11652 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11653 COMPOUND_EXPR will be an argument which must be evaluated.
11654 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11655 COMPOUND_EXPR in the chain will contain the tree for the simplified
11656 form of the builtin function call. */
11657
11658 static tree
11659 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11660 {
11661 if (!validate_arg (dst, POINTER_TYPE)
11662 || !validate_arg (src, POINTER_TYPE))
11663 return NULL_TREE;
11664 else
11665 {
11666 const char *p = c_getstr (src);
11667
11668 /* If the string length is zero, return the dst parameter. */
11669 if (p && *p == '\0')
11670 return dst;
11671
11672 if (optimize_insn_for_speed_p ())
11673 {
11674 /* See if we can store by pieces into (dst + strlen(dst)). */
11675 tree newdst, call;
11676 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11677 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11678
11679 if (!strlen_fn || !strcpy_fn)
11680 return NULL_TREE;
11681
11682 /* If we don't have a movstr pattern, only emit the strcpy call
11683 when the length of the source string is computable; in that case
11684 the strcpy can be expanded as a memcpy, probably turning into a
11685 sequence of mov instructions later. If we have movstr
11686 instructions, we can emit strcpy calls unconditionally. */
11687 if (!HAVE_movstr)
11688 {
11689 tree len = c_strlen (src, 1);
11690 if (! len || TREE_SIDE_EFFECTS (len))
11691 return NULL_TREE;
11692 }
11693
11694 /* Stabilize the argument list. */
11695 dst = builtin_save_expr (dst);
11696
11697 /* Create strlen (dst). */
11698 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11699 /* Create (dst p+ strlen (dst)). */
11700
11701 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11702 newdst = builtin_save_expr (newdst);
11703
11704 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11705 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11706 }
11707 return NULL_TREE;
11708 }
11709 }
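
/* The speed transformation above turns strcat (dst, src) into roughly

     tmp = dst, strcpy (tmp + strlen (tmp), src), tmp

   a COMPOUND_EXPR whose value is the saved DST, so that the strcpy,
   whose source length is known (or for which movstr exists), can
   later be expanded inline. */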
11710
11711 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11712 arguments to the call.
11713
11714 Return NULL_TREE if no simplification was possible, otherwise return the
11715 simplified form of the call as a tree.
11716
11717 The simplified form may be a constant or other expression which
11718 computes the same value, but in a more efficient manner (including
11719 calls to other builtin functions).
11720
11721 The call may contain arguments which need to be evaluated, but
11722 which are not useful to determine the result of the call. In
11723 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11724 COMPOUND_EXPR will be an argument which must be evaluated.
11725 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11726 COMPOUND_EXPR in the chain will contain the tree for the simplified
11727 form of the builtin function call. */
11728
11729 static tree
11730 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11731 {
11732 if (!validate_arg (dst, POINTER_TYPE)
11733 || !validate_arg (src, POINTER_TYPE)
11734 || !validate_arg (len, INTEGER_TYPE))
11735 return NULL_TREE;
11736 else
11737 {
11738 const char *p = c_getstr (src);
11739
11740 /* If the requested length is zero, or the src parameter string
11741 length is zero, return the dst parameter. */
11742 if (integer_zerop (len) || (p && *p == '\0'))
11743 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11744
11745 /* If the requested len is greater than or equal to the string
11746 length, call strcat. */
11747 if (TREE_CODE (len) == INTEGER_CST && p
11748 && compare_tree_int (len, strlen (p)) >= 0)
11749 {
11750 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11751
11752 /* If the replacement _DECL isn't initialized, don't do the
11753 transformation. */
11754 if (!fn)
11755 return NULL_TREE;
11756
11757 return build_call_expr_loc (loc, fn, 2, dst, src);
11758 }
11759 return NULL_TREE;
11760 }
11761 }
11762
11763 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11764 to the call.
11765
11766 Return NULL_TREE if no simplification was possible, otherwise return the
11767 simplified form of the call as a tree.
11768
11769 The simplified form may be a constant or other expression which
11770 computes the same value, but in a more efficient manner (including
11771 calls to other builtin functions).
11772
11773 The call may contain arguments which need to be evaluated, but
11774 which are not useful to determine the result of the call. In
11775 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11776 COMPOUND_EXPR will be an argument which must be evaluated.
11777 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11778 COMPOUND_EXPR in the chain will contain the tree for the simplified
11779 form of the builtin function call. */
11780
11781 static tree
11782 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11783 {
11784 if (!validate_arg (s1, POINTER_TYPE)
11785 || !validate_arg (s2, POINTER_TYPE))
11786 return NULL_TREE;
11787 else
11788 {
11789 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11790
11791 /* If both arguments are constants, evaluate at compile-time. */
11792 if (p1 && p2)
11793 {
11794 const size_t r = strspn (p1, p2);
11795 return build_int_cst (size_type_node, r);
11796 }
11797
11798 /* If either argument is "", the result is 0. */
11799 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11800 /* Evaluate and ignore both arguments in case either one has
11801 side-effects. */
11802 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11803 s1, s2);
11804 return NULL_TREE;
11805 }
11806 }
11807
11808 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11809 to the call.
11810
11811 Return NULL_TREE if no simplification was possible, otherwise return the
11812 simplified form of the call as a tree.
11813
11814 The simplified form may be a constant or other expression which
11815 computes the same value, but in a more efficient manner (including
11816 calls to other builtin functions).
11817
11818 The call may contain arguments which need to be evaluated, but
11819 which are not useful to determine the result of the call. In
11820 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11821 COMPOUND_EXPR will be an argument which must be evaluated.
11822 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11823 COMPOUND_EXPR in the chain will contain the tree for the simplified
11824 form of the builtin function call. */
11825
11826 static tree
11827 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11828 {
11829 if (!validate_arg (s1, POINTER_TYPE)
11830 || !validate_arg (s2, POINTER_TYPE))
11831 return NULL_TREE;
11832 else
11833 {
11834 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11835
11836 /* If both arguments are constants, evaluate at compile-time. */
11837 if (p1 && p2)
11838 {
11839 const size_t r = strcspn (p1, p2);
11840 return build_int_cst (size_type_node, r);
11841 }
11842
11843 /* If the first argument is "", the result is 0. */
11844 if (p1 && *p1 == '\0')
11845 {
11846 /* Evaluate and ignore argument s2 in case it has
11847 side-effects. */
11848 return omit_one_operand_loc (loc, size_type_node,
11849 size_zero_node, s2);
11850 }
11851
11852 /* If the second argument is "", return __builtin_strlen(s1). */
11853 if (p2 && *p2 == '\0')
11854 {
11855 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11856
11857 /* If the replacement _DECL isn't initialized, don't do the
11858 transformation. */
11859 if (!fn)
11860 return NULL_TREE;
11861
11862 return build_call_expr_loc (loc, fn, 1, s1);
11863 }
11864 return NULL_TREE;
11865 }
11866 }
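
/* Compile-time examples for the two span folds: strspn ("abcba", "ab")
   folds to 2 and strcspn ("abc", "c") folds to 2; strcspn (s, "")
   becomes strlen (s), and strcspn ("", s) becomes 0 after evaluating
   the second argument for side effects. */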
11867
11868 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11869 to the call. IGNORE is true if the value returned
11870 by the builtin will be ignored. UNLOCKED is true is true if this
11871 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11872 the known length of the string. Return NULL_TREE if no simplification
11873 was possible. */
11874
11875 tree
11876 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11877 bool ignore, bool unlocked, tree len)
11878 {
11879 /* If we're using an unlocked function, assume the other unlocked
11880 functions exist explicitly. */
11881 tree const fn_fputc = (unlocked
11882 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11883 : builtin_decl_implicit (BUILT_IN_FPUTC));
11884 tree const fn_fwrite = (unlocked
11885 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11886 : builtin_decl_implicit (BUILT_IN_FWRITE));
11887
11888 /* If the return value is used, don't do the transformation. */
11889 if (!ignore)
11890 return NULL_TREE;
11891
11892 /* Verify the arguments in the original call. */
11893 if (!validate_arg (arg0, POINTER_TYPE)
11894 || !validate_arg (arg1, POINTER_TYPE))
11895 return NULL_TREE;
11896
11897 if (! len)
11898 len = c_strlen (arg0, 0);
11899
11900 /* Get the length of the string passed to fputs. If the length
11901 can't be determined, punt. */
11902 if (!len
11903 || TREE_CODE (len) != INTEGER_CST)
11904 return NULL_TREE;
11905
11906 switch (compare_tree_int (len, 1))
11907 {
11908 case -1: /* length is 0, delete the call entirely. */
11909 return omit_one_operand_loc (loc, integer_type_node,
11910 integer_zero_node, arg1);
11911
11912 case 0: /* length is 1, call fputc. */
11913 {
11914 const char *p = c_getstr (arg0);
11915
11916 if (p != NULL)
11917 {
11918 if (fn_fputc)
11919 return build_call_expr_loc (loc, fn_fputc, 2,
11920 build_int_cst
11921 (integer_type_node, p[0]), arg1);
11922 else
11923 return NULL_TREE;
11924 }
11925 }
11926 /* FALLTHROUGH */
11927 case 1: /* length is greater than 1, call fwrite. */
11928 {
11929 /* If optimizing for size, keep fputs. */
11930 if (optimize_function_for_size_p (cfun))
11931 return NULL_TREE;
11932 /* New argument list transforming fputs(string, stream) to
11933 fwrite(string, 1, len, stream). */
11934 if (fn_fwrite)
11935 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11936 size_one_node, len, arg1);
11937 else
11938 return NULL_TREE;
11939 }
11940 default:
11941 gcc_unreachable ();
11942 }
11943 return NULL_TREE;
11944 }
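
/* For instance, with the result ignored, fputs ("", f) disappears
   (F is still evaluated), fputs ("h", f) becomes fputc ('h', f), and
   fputs ("hello", f) becomes fwrite ("hello", 1, 5, f) unless we are
   optimizing for size. */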
11945
11946 /* Fold the next_arg or va_start call EXP. Returns true if an error
11947 was produced, false otherwise. This is done so that we don't output
11948 the error or warning twice or three times. */
11949
11950 bool
11951 fold_builtin_next_arg (tree exp, bool va_start_p)
11952 {
11953 tree fntype = TREE_TYPE (current_function_decl);
11954 int nargs = call_expr_nargs (exp);
11955 tree arg;
11956 /* There is a good chance the current input_location points inside the
11957 definition of the va_start macro (perhaps on the token for
11958 builtin) in a system header, so warnings will not be emitted.
11959 Use the location in real source code. */
11960 source_location current_location =
11961 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11962 NULL);
11963
11964 if (!stdarg_p (fntype))
11965 {
11966 error ("%<va_start%> used in function with fixed args");
11967 return true;
11968 }
11969
11970 if (va_start_p)
11971 {
11972 if (nargs != 2)
11973 {
11974 error ("wrong number of arguments to function %<va_start%>");
11975 return true;
11976 }
11977 arg = CALL_EXPR_ARG (exp, 1);
11978 }
11979 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11980 we have checked the arguments and, if needed, issued a warning. */
11981 else
11982 {
11983 if (nargs == 0)
11984 {
11985 /* Evidently an out of date version of <stdarg.h>; can't validate
11986 va_start's second argument, but can still work as intended. */
11987 warning_at (current_location,
11988 OPT_Wvarargs,
11989 "%<__builtin_next_arg%> called without an argument");
11990 return true;
11991 }
11992 else if (nargs > 1)
11993 {
11994 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11995 return true;
11996 }
11997 arg = CALL_EXPR_ARG (exp, 0);
11998 }
11999
12000 if (TREE_CODE (arg) == SSA_NAME)
12001 arg = SSA_NAME_VAR (arg);
12002
12003 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12004 or __builtin_next_arg (0) the first time we see it, after checking
12005 the arguments and if needed issuing a warning. */
12006 if (!integer_zerop (arg))
12007 {
12008 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12009
12010 /* Strip off all nops for the sake of the comparison. This
12011 is not quite the same as STRIP_NOPS. It does more.
12012 We must also strip off INDIRECT_EXPR for C++ reference
12013 parameters. */
12014 while (CONVERT_EXPR_P (arg)
12015 || TREE_CODE (arg) == INDIRECT_REF)
12016 arg = TREE_OPERAND (arg, 0);
12017 if (arg != last_parm)
12018 {
12019 /* FIXME: Sometimes the tree optimizers hand us something other
12020 than the last argument even though the user did pass the last
12021 one. We only warn here, rather than fixing the argument up,
12022 so wrong code may still be generated because of
12023 it. */
12024 warning_at (current_location,
12025 OPT_Wvarargs,
12026 "second parameter of %<va_start%> not last named argument");
12027 }
12028
12029 /* Undefined by C99 7.15.1.4p4 (va_start):
12030 "If the parameter parmN is declared with the register storage
12031 class, with a function or array type, or with a type that is
12032 not compatible with the type that results after application of
12033 the default argument promotions, the behavior is undefined."
12034 */
12035 else if (DECL_REGISTER (arg))
12036 {
12037 warning_at (current_location,
12038 OPT_Wvarargs,
12039 "undefined behaviour when second parameter of "
12040 "%<va_start%> is declared with %<register%> storage");
12041 }
12042
12043 /* We want to verify the second parameter just once before the tree
12044 optimizers are run and then avoid keeping it in the tree,
12045 as otherwise we could warn even for correct code like:
12046 void foo (int i, ...)
12047 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12048 if (va_start_p)
12049 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12050 else
12051 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12052 }
12053 return false;
12054 }
12055
12056
12057 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12058 ORIG may be null if this is a 2-argument call. We don't attempt to
12059 simplify calls with more than 3 arguments.
12060
12061 Return NULL_TREE if no simplification was possible, otherwise return the
12062 simplified form of the call as a tree. If IGNORED is true, it means that
12063 the caller does not use the returned value of the function. */
12064
12065 static tree
12066 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12067 tree orig, int ignored)
12068 {
12069 tree call, retval;
12070 const char *fmt_str = NULL;
12071
12072 /* Verify the required arguments in the original call. We deal with two
12073 types of sprintf() calls: 'sprintf (str, fmt)' and
12074 'sprintf (dest, "%s", orig)'. */
12075 if (!validate_arg (dest, POINTER_TYPE)
12076 || !validate_arg (fmt, POINTER_TYPE))
12077 return NULL_TREE;
12078 if (orig && !validate_arg (orig, POINTER_TYPE))
12079 return NULL_TREE;
12080
12081 /* Check whether the format is a literal string constant. */
12082 fmt_str = c_getstr (fmt);
12083 if (fmt_str == NULL)
12084 return NULL_TREE;
12085
12086 call = NULL_TREE;
12087 retval = NULL_TREE;
12088
12089 if (!init_target_chars ())
12090 return NULL_TREE;
12091
12092 /* If the format doesn't contain % args or %%, use strcpy. */
12093 if (strchr (fmt_str, target_percent) == NULL)
12094 {
12095 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12096
12097 if (!fn)
12098 return NULL_TREE;
12099
12100 /* Don't optimize sprintf (buf, "abc", ptr++). */
12101 if (orig)
12102 return NULL_TREE;
12103
12104 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12105 'format' is known to contain no % formats. */
12106 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12107 if (!ignored)
12108 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12109 }
12110
12111 /* If the format is "%s", use strcpy if the result isn't used. */
12112 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12113 {
12114 tree fn;
12115 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12116
12117 if (!fn)
12118 return NULL_TREE;
12119
12120 /* Don't crash on sprintf (str1, "%s"). */
12121 if (!orig)
12122 return NULL_TREE;
12123
12124 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12125 if (!ignored)
12126 {
12127 retval = c_strlen (orig, 1);
12128 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12129 return NULL_TREE;
12130 }
12131 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12132 }
12133
12134 if (call && retval)
12135 {
12136 retval = fold_convert_loc
12137 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12138 retval);
12139 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12140 }
12141 else
12142 return call;
12143 }
12144
12145 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12146 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12147 attempt to simplify calls with more than 4 arguments.
12148
12149 Return NULL_TREE if no simplification was possible, otherwise return the
12150 simplified form of the call as a tree. If IGNORED is true, it means that
12151 the caller does not use the returned value of the function. */
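/* A sketch of the corresponding snprintf foldings, assuming the
destination size argument is the constant 8 (names illustrative):

snprintf (buf, 8, "abc");    =>  strcpy (buf, "abc");  value 3
snprintf (buf, 8, "%s", s);  =>  strcpy (buf, s);  only when
strlen (s) is known to be < 8

When the output might reach the destination size we punt, so the
library call can truncate and NUL-terminate as required.  */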
12152
12153 static tree
12154 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12155 tree orig, int ignored)
12156 {
12157 tree call, retval;
12158 const char *fmt_str = NULL;
12159 unsigned HOST_WIDE_INT destlen;
12160
12161 /* Verify the required arguments in the original call. We deal with two
12162 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12163 'snprintf (dest, cst, "%s", orig)'. */
12164 if (!validate_arg (dest, POINTER_TYPE)
12165 || !validate_arg (destsize, INTEGER_TYPE)
12166 || !validate_arg (fmt, POINTER_TYPE))
12167 return NULL_TREE;
12168 if (orig && !validate_arg (orig, POINTER_TYPE))
12169 return NULL_TREE;
12170
12171 if (!host_integerp (destsize, 1))
12172 return NULL_TREE;
12173
12174 /* Check whether the format is a literal string constant. */
12175 fmt_str = c_getstr (fmt);
12176 if (fmt_str == NULL)
12177 return NULL_TREE;
12178
12179 call = NULL_TREE;
12180 retval = NULL_TREE;
12181
12182 if (!init_target_chars ())
12183 return NULL_TREE;
12184
12185 destlen = tree_low_cst (destsize, 1);
12186
12187 /* If the format doesn't contain % args or %%, use strcpy. */
12188 if (strchr (fmt_str, target_percent) == NULL)
12189 {
12190 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12191 size_t len = strlen (fmt_str);
12192
12193 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12194 if (orig)
12195 return NULL_TREE;
12196
12197 /* We could expand this as
12198 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12199 or to
12200 memcpy (str, fmt_with_nul_at_cstm1, cst);
12201 but in the former case that might increase code size
12202 and in the latter case grow .rodata section too much.
12203 So punt for now. */
12204 if (len >= destlen)
12205 return NULL_TREE;
12206
12207 if (!fn)
12208 return NULL_TREE;
12209
12210 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12211 'format' is known to contain no % formats and
12212 strlen (fmt) < cst. */
12213 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12214
12215 if (!ignored)
12216 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12217 }
12218
12219 /* If the format is "%s", use strcpy if the result isn't used. */
12220 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12221 {
12222 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12223 unsigned HOST_WIDE_INT origlen;
12224
12225 /* Don't crash on snprintf (str1, cst, "%s"). */
12226 if (!orig)
12227 return NULL_TREE;
12228
12229 retval = c_strlen (orig, 1);
12230 if (!retval || !host_integerp (retval, 1))
12231 return NULL_TREE;
12232
12233 origlen = tree_low_cst (retval, 1);
12234 /* We could expand this as
12235 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12236 or to
12237 memcpy (str1, str2_with_nul_at_cstm1, cst);
12238 but in the former case that might increase code size
12239 and in the latter case grow .rodata section too much.
12240 So punt for now. */
12241 if (origlen >= destlen)
12242 return NULL_TREE;
12243
12244 /* Convert snprintf (str1, cst, "%s", str2) into
12245 strcpy (str1, str2) if strlen (str2) < cst. */
12246 if (!fn)
12247 return NULL_TREE;
12248
12249 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12250
12251 if (ignored)
12252 retval = NULL_TREE;
12253 }
12254
12255 if (call && retval)
12256 {
12257 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12258 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12259 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12260 }
12261 else
12262 return call;
12263 }
12264
12265 /* Expand a call EXP to __builtin_object_size. */
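/* By the time we expand, the object size pass has already folded every
call it could compute, so only the documented fallback remains:
(size_t) -1 for types 0 and 1, and (size_t) 0 for types 2 and 3.
E.g., with p a pointer of unknown provenance:

__builtin_object_size (p, 0)  =>  (size_t) -1
__builtin_object_size (p, 2)  =>  (size_t) 0  */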
12266
12267 rtx
12268 expand_builtin_object_size (tree exp)
12269 {
12270 tree ost;
12271 int object_size_type;
12272 tree fndecl = get_callee_fndecl (exp);
12273
12274 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12275 {
12276 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12277 exp, fndecl);
12278 expand_builtin_trap ();
12279 return const0_rtx;
12280 }
12281
12282 ost = CALL_EXPR_ARG (exp, 1);
12283 STRIP_NOPS (ost);
12284
12285 if (TREE_CODE (ost) != INTEGER_CST
12286 || tree_int_cst_sgn (ost) < 0
12287 || compare_tree_int (ost, 3) > 0)
12288 {
12289 error ("%Klast argument of %D is not integer constant between 0 and 3",
12290 exp, fndecl);
12291 expand_builtin_trap ();
12292 return const0_rtx;
12293 }
12294
12295 object_size_type = tree_low_cst (ost, 0);
12296
12297 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12298 }
12299
12300 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12301 FCODE is the BUILT_IN_* to use.
12302 Return NULL_RTX if we failed; the caller should emit a normal call,
12303 otherwise try to get the result in TARGET, if convenient (and in
12304 mode MODE if that's convenient). */
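/* A sketch of the checking logic with hypothetical values: for
char buf[8], fortification emits __memcpy_chk (buf, src, len, 8).
If LEN is the constant 6, we expand a plain memcpy (buf, src, 6);
if LEN is the constant 12, we warn and emit the normal checked call,
which fails at run time; if SIZE is -1 (unknown), the check is
dropped and the plain function is expanded.  */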
12305
12306 static rtx
12307 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12308 enum built_in_function fcode)
12309 {
12310 tree dest, src, len, size;
12311
12312 if (!validate_arglist (exp,
12313 POINTER_TYPE,
12314 fcode == BUILT_IN_MEMSET_CHK
12315 ? INTEGER_TYPE : POINTER_TYPE,
12316 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12317 return NULL_RTX;
12318
12319 dest = CALL_EXPR_ARG (exp, 0);
12320 src = CALL_EXPR_ARG (exp, 1);
12321 len = CALL_EXPR_ARG (exp, 2);
12322 size = CALL_EXPR_ARG (exp, 3);
12323
12324 if (! host_integerp (size, 1))
12325 return NULL_RTX;
12326
12327 if (host_integerp (len, 1) || integer_all_onesp (size))
12328 {
12329 tree fn;
12330
12331 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12332 {
12333 warning_at (tree_nonartificial_location (exp),
12334 0, "%Kcall to %D will always overflow destination buffer",
12335 exp, get_callee_fndecl (exp));
12336 return NULL_RTX;
12337 }
12338
12339 fn = NULL_TREE;
12340 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12341 mem{cpy,pcpy,move,set} is available. */
12342 switch (fcode)
12343 {
12344 case BUILT_IN_MEMCPY_CHK:
12345 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12346 break;
12347 case BUILT_IN_MEMPCPY_CHK:
12348 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12349 break;
12350 case BUILT_IN_MEMMOVE_CHK:
12351 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12352 break;
12353 case BUILT_IN_MEMSET_CHK:
12354 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12355 break;
12356 default:
12357 break;
12358 }
12359
12360 if (! fn)
12361 return NULL_RTX;
12362
12363 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12364 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12365 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12366 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12367 }
12368 else if (fcode == BUILT_IN_MEMSET_CHK)
12369 return NULL_RTX;
12370 else
12371 {
12372 unsigned int dest_align = get_pointer_alignment (dest);
12373
12374 /* If DEST is not a pointer type, call the normal function. */
12375 if (dest_align == 0)
12376 return NULL_RTX;
12377
12378 /* If SRC and DEST are the same (and not volatile), do nothing. */
12379 if (operand_equal_p (src, dest, 0))
12380 {
12381 tree expr;
12382
12383 if (fcode != BUILT_IN_MEMPCPY_CHK)
12384 {
12385 /* Evaluate and ignore LEN in case it has side-effects. */
12386 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12387 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12388 }
12389
12390 expr = fold_build_pointer_plus (dest, len);
12391 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12392 }
12393
12394 /* __memmove_chk special case. */
12395 if (fcode == BUILT_IN_MEMMOVE_CHK)
12396 {
12397 unsigned int src_align = get_pointer_alignment (src);
12398
12399 if (src_align == 0)
12400 return NULL_RTX;
12401
12402 /* If SRC is categorized for a read-only section, we can use the
12403 normal __memcpy_chk. */
12404 if (readonly_data_expr (src))
12405 {
12406 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12407 if (!fn)
12408 return NULL_RTX;
12409 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12410 dest, src, len, size);
12411 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12412 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12413 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12414 }
12415 }
12416 return NULL_RTX;
12417 }
12418 }
12419
12420 /* Emit warning if a buffer overflow is detected at compile time. */
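/* Illustrative diagnoses, assuming char buf[4] so the size argument
computed by fortification is 4:

__strcpy_chk (buf, "too long", 4)   -> "will always overflow"
__strncpy_chk (buf, s, 10, 4)       -> "will always overflow"

Only constant sizes and lengths are diagnosed; anything unknown at
compile time stays silent.  */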
12421
12422 static void
12423 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12424 {
12425 int is_strlen = 0;
12426 tree len, size;
12427 location_t loc = tree_nonartificial_location (exp);
12428
12429 switch (fcode)
12430 {
12431 case BUILT_IN_STRCPY_CHK:
12432 case BUILT_IN_STPCPY_CHK:
12433 /* For __strcat_chk the warning will be emitted only if overflowing
12434 by at least strlen (dest) + 1 bytes. */
12435 case BUILT_IN_STRCAT_CHK:
12436 len = CALL_EXPR_ARG (exp, 1);
12437 size = CALL_EXPR_ARG (exp, 2);
12438 is_strlen = 1;
12439 break;
12440 case BUILT_IN_STRNCAT_CHK:
12441 case BUILT_IN_STRNCPY_CHK:
12442 case BUILT_IN_STPNCPY_CHK:
12443 len = CALL_EXPR_ARG (exp, 2);
12444 size = CALL_EXPR_ARG (exp, 3);
12445 break;
12446 case BUILT_IN_SNPRINTF_CHK:
12447 case BUILT_IN_VSNPRINTF_CHK:
12448 len = CALL_EXPR_ARG (exp, 1);
12449 size = CALL_EXPR_ARG (exp, 3);
12450 break;
12451 default:
12452 gcc_unreachable ();
12453 }
12454
12455 if (!len || !size)
12456 return;
12457
12458 if (! host_integerp (size, 1) || integer_all_onesp (size))
12459 return;
12460
12461 if (is_strlen)
12462 {
12463 len = c_strlen (len, 1);
12464 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12465 return;
12466 }
12467 else if (fcode == BUILT_IN_STRNCAT_CHK)
12468 {
12469 tree src = CALL_EXPR_ARG (exp, 1);
12470 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12471 return;
12472 src = c_strlen (src, 1);
12473 if (! src || ! host_integerp (src, 1))
12474 {
12475 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12476 exp, get_callee_fndecl (exp));
12477 return;
12478 }
12479 else if (tree_int_cst_lt (src, size))
12480 return;
12481 }
12482 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12483 return;
12484
12485 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12486 exp, get_callee_fndecl (exp));
12487 }
12488
12489 /* Emit warning if a buffer overflow is detected at compile time
12490 in __sprintf_chk/__vsprintf_chk calls. */
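/* Illustrative cases, again assuming char buf[4] so SIZE is 4:

__sprintf_chk (buf, 0, 4, "hello")        -> warns (6 bytes needed)
__sprintf_chk (buf, 0, 4, "%s", "hello")  -> warns likewise
__sprintf_chk (buf, 0, 4, "%d", i)        -> silent; length unknown  */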
12491
12492 static void
12493 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12494 {
12495 tree size, len, fmt;
12496 const char *fmt_str;
12497 int nargs = call_expr_nargs (exp);
12498
12499 /* Verify the required arguments in the original call. */
12500
12501 if (nargs < 4)
12502 return;
12503 size = CALL_EXPR_ARG (exp, 2);
12504 fmt = CALL_EXPR_ARG (exp, 3);
12505
12506 if (! host_integerp (size, 1) || integer_all_onesp (size))
12507 return;
12508
12509 /* Check whether the format is a literal string constant. */
12510 fmt_str = c_getstr (fmt);
12511 if (fmt_str == NULL)
12512 return;
12513
12514 if (!init_target_chars ())
12515 return;
12516
12517 /* If the format doesn't contain % args or %%, we know its size. */
12518 if (strchr (fmt_str, target_percent) == 0)
12519 len = build_int_cstu (size_type_node, strlen (fmt_str));
12520 /* If the format is "%s" and the first variadic argument is a
12521 string literal, we know the size too. */
12522 else if (fcode == BUILT_IN_SPRINTF_CHK
12523 && strcmp (fmt_str, target_percent_s) == 0)
12524 {
12525 tree arg;
12526
12527 if (nargs < 5)
12528 return;
12529 arg = CALL_EXPR_ARG (exp, 4);
12530 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12531 return;
12532
12533 len = c_strlen (arg, 1);
12534 if (!len || ! host_integerp (len, 1))
12535 return;
12536 }
12537 else
12538 return;
12539
12540 if (! tree_int_cst_lt (len, size))
12541 warning_at (tree_nonartificial_location (exp),
12542 0, "%Kcall to %D will always overflow destination buffer",
12543 exp, get_callee_fndecl (exp));
12544 }
12545
12546 /* Emit warning if a free is called with address of a variable. */
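/* For example:

int x; free (&x);     -> "attempt to free a non-heap object 'x'"
char a[8]; free (a);  -> likewise

Addresses reached through a dereference are left alone, since the
pointed-to storage may well be heap memory.  */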
12547
12548 static void
12549 maybe_emit_free_warning (tree exp)
12550 {
12551 tree arg = CALL_EXPR_ARG (exp, 0);
12552
12553 STRIP_NOPS (arg);
12554 if (TREE_CODE (arg) != ADDR_EXPR)
12555 return;
12556
12557 arg = get_base_address (TREE_OPERAND (arg, 0));
12558 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12559 return;
12560
12561 if (SSA_VAR_P (arg))
12562 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12563 "%Kattempt to free a non-heap object %qD", exp, arg);
12564 else
12565 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12566 "%Kattempt to free a non-heap object", exp);
12567 }
12568
12569 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12570 if possible. */
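/* A sketch of the fold-time result, assuming a hypothetical local
char a[10]:

__builtin_object_size (&a[3], 0)  =>  7
__builtin_object_size (a + i, 0)  =>  folding delayed; later
passes may still pin i down  */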
12571
12572 tree
12573 fold_builtin_object_size (tree ptr, tree ost)
12574 {
12575 unsigned HOST_WIDE_INT bytes;
12576 int object_size_type;
12577
12578 if (!validate_arg (ptr, POINTER_TYPE)
12579 || !validate_arg (ost, INTEGER_TYPE))
12580 return NULL_TREE;
12581
12582 STRIP_NOPS (ost);
12583
12584 if (TREE_CODE (ost) != INTEGER_CST
12585 || tree_int_cst_sgn (ost) < 0
12586 || compare_tree_int (ost, 3) > 0)
12587 return NULL_TREE;
12588
12589 object_size_type = tree_low_cst (ost, 0);
12590
12591 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12592 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12593 and (size_t) 0 for types 2 and 3. */
12594 if (TREE_SIDE_EFFECTS (ptr))
12595 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12596
12597 if (TREE_CODE (ptr) == ADDR_EXPR)
12598 {
12599 bytes = compute_builtin_object_size (ptr, object_size_type);
12600 if (double_int_fits_to_tree_p (size_type_node,
12601 double_int::from_uhwi (bytes)))
12602 return build_int_cstu (size_type_node, bytes);
12603 }
12604 else if (TREE_CODE (ptr) == SSA_NAME)
12605 {
12606 /* If object size is not known yet, delay folding until
12607 later. Maybe subsequent passes will help determine
12608 it. */
12609 bytes = compute_builtin_object_size (ptr, object_size_type);
12610 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12611 && double_int_fits_to_tree_p (size_type_node,
12612 double_int::from_uhwi (bytes)))
12613 return build_int_cstu (size_type_node, bytes);
12614 }
12615
12616 return NULL_TREE;
12617 }
12618
12619 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12620 DEST, SRC, LEN, and SIZE are the arguments to the call.
12621 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12622 code of the builtin. If MAXLEN is not NULL, it is maximum length
12623 passed as third argument. */
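/* This mirrors the expander above but runs earlier; a sketch, with
buf a hypothetical char[8]:

__memcpy_chk (buf, src, 6, 8)   =>  memcpy (buf, src, 6)
__memcpy_chk (buf, src, n, -1)  =>  memcpy (buf, src, n)
__memcpy_chk (p, p, n, 8)       =>  p  (self-copy elided)

If SIZE is smaller than the (possibly MAXLEN-bounded) length, the
checked call is kept so it can fail at run time.  */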
12624
12625 tree
12626 fold_builtin_memory_chk (location_t loc, tree fndecl,
12627 tree dest, tree src, tree len, tree size,
12628 tree maxlen, bool ignore,
12629 enum built_in_function fcode)
12630 {
12631 tree fn;
12632
12633 if (!validate_arg (dest, POINTER_TYPE)
12634 || !validate_arg (src,
12635 (fcode == BUILT_IN_MEMSET_CHK
12636 ? INTEGER_TYPE : POINTER_TYPE))
12637 || !validate_arg (len, INTEGER_TYPE)
12638 || !validate_arg (size, INTEGER_TYPE))
12639 return NULL_TREE;
12640
12641 /* If SRC and DEST are the same (and not volatile), return DEST
12642 (resp. DEST+LEN for __mempcpy_chk). */
12643 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12644 {
12645 if (fcode != BUILT_IN_MEMPCPY_CHK)
12646 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12647 dest, len);
12648 else
12649 {
12650 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12651 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12652 }
12653 }
12654
12655 if (! host_integerp (size, 1))
12656 return NULL_TREE;
12657
12658 if (! integer_all_onesp (size))
12659 {
12660 if (! host_integerp (len, 1))
12661 {
12662 /* If LEN is not constant, try MAXLEN too.
12663 For MAXLEN only allow optimizing into non-_ocs function
12664 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12665 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12666 {
12667 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12668 {
12669 /* (void) __mempcpy_chk () can be optimized into
12670 (void) __memcpy_chk (). */
12671 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12672 if (!fn)
12673 return NULL_TREE;
12674
12675 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12676 }
12677 return NULL_TREE;
12678 }
12679 }
12680 else
12681 maxlen = len;
12682
12683 if (tree_int_cst_lt (size, maxlen))
12684 return NULL_TREE;
12685 }
12686
12687 fn = NULL_TREE;
12688 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12689 mem{cpy,pcpy,move,set} is available. */
12690 switch (fcode)
12691 {
12692 case BUILT_IN_MEMCPY_CHK:
12693 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12694 break;
12695 case BUILT_IN_MEMPCPY_CHK:
12696 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12697 break;
12698 case BUILT_IN_MEMMOVE_CHK:
12699 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12700 break;
12701 case BUILT_IN_MEMSET_CHK:
12702 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12703 break;
12704 default:
12705 break;
12706 }
12707
12708 if (!fn)
12709 return NULL_TREE;
12710
12711 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12712 }
12713
12714 /* Fold a call to the __st[rp]cpy_chk builtin.
12715 DEST, SRC, and SIZE are the arguments to the call.
12716 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12717 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12718 strings passed as second argument. */
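/* A sketch with hypothetical operands, SIZE being the object size
fortification computed for DEST:

__strcpy_chk (buf, "abc", 8)     =>  strcpy (buf, "abc")
__strcpy_chk (buf, s, 8)         =>  __memcpy_chk (buf, s, LEN + 1, 8)
when c_strlen gives a usable non-constant LEN
(void) __stpcpy_chk (buf, s, 8)  =>  __strcpy_chk (buf, s, 8)  */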
12719
12720 tree
12721 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12722 tree src, tree size,
12723 tree maxlen, bool ignore,
12724 enum built_in_function fcode)
12725 {
12726 tree len, fn;
12727
12728 if (!validate_arg (dest, POINTER_TYPE)
12729 || !validate_arg (src, POINTER_TYPE)
12730 || !validate_arg (size, INTEGER_TYPE))
12731 return NULL_TREE;
12732
12733 /* If SRC and DEST are the same (and not volatile), return DEST. */
12734 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12735 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12736
12737 if (! host_integerp (size, 1))
12738 return NULL_TREE;
12739
12740 if (! integer_all_onesp (size))
12741 {
12742 len = c_strlen (src, 1);
12743 if (! len || ! host_integerp (len, 1))
12744 {
12745 /* If LEN is not constant, try MAXLEN too.
12746 For MAXLEN only allow optimizing into non-_ocs function
12747 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12748 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12749 {
12750 if (fcode == BUILT_IN_STPCPY_CHK)
12751 {
12752 if (! ignore)
12753 return NULL_TREE;
12754
12755 /* If return value of __stpcpy_chk is ignored,
12756 optimize into __strcpy_chk. */
12757 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12758 if (!fn)
12759 return NULL_TREE;
12760
12761 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12762 }
12763
12764 if (! len || TREE_SIDE_EFFECTS (len))
12765 return NULL_TREE;
12766
12767 /* If c_strlen returned something, but not a constant,
12768 transform __strcpy_chk into __memcpy_chk. */
12769 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12770 if (!fn)
12771 return NULL_TREE;
12772
12773 len = fold_convert_loc (loc, size_type_node, len);
12774 len = size_binop_loc (loc, PLUS_EXPR, len,
12775 build_int_cst (size_type_node, 1));
12776 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12777 build_call_expr_loc (loc, fn, 4,
12778 dest, src, len, size));
12779 }
12780 }
12781 else
12782 maxlen = len;
12783
12784 if (! tree_int_cst_lt (maxlen, size))
12785 return NULL_TREE;
12786 }
12787
12788 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12789 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12790 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12791 if (!fn)
12792 return NULL_TREE;
12793
12794 return build_call_expr_loc (loc, fn, 2, dest, src);
12795 }
12796
12797 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12798 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12799 length passed as third argument. IGNORE is true if return value can be
12800 ignored. FCODE is the BUILT_IN_* code of the builtin. */
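/* For instance (buf hypothetical, with object size 8):

__strncpy_chk (buf, s, 6, 8)         =>  strncpy (buf, s, 6)
(void) __stpncpy_chk (buf, s, n, 8)  =>  __strncpy_chk (buf, s, n, 8)
__strncpy_chk (buf, s, n, 8)         =>  kept; n is not constant
and no MAXLEN bound helps  */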
12801
12802 tree
12803 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12804 tree len, tree size, tree maxlen, bool ignore,
12805 enum built_in_function fcode)
12806 {
12807 tree fn;
12808
12809 if (!validate_arg (dest, POINTER_TYPE)
12810 || !validate_arg (src, POINTER_TYPE)
12811 || !validate_arg (len, INTEGER_TYPE)
12812 || !validate_arg (size, INTEGER_TYPE))
12813 return NULL_TREE;
12814
12815 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12816 {
12817 /* If return value of __stpncpy_chk is ignored,
12818 optimize into __strncpy_chk. */
12819 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12820 if (fn)
12821 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12822 }
12823
12824 if (! host_integerp (size, 1))
12825 return NULL_TREE;
12826
12827 if (! integer_all_onesp (size))
12828 {
12829 if (! host_integerp (len, 1))
12830 {
12831 /* If LEN is not constant, try MAXLEN too.
12832 For MAXLEN only allow optimizing into non-_ocs function
12833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12834 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12835 return NULL_TREE;
12836 }
12837 else
12838 maxlen = len;
12839
12840 if (tree_int_cst_lt (size, maxlen))
12841 return NULL_TREE;
12842 }
12843
12844 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12845 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12846 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12847 if (!fn)
12848 return NULL_TREE;
12849
12850 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12851 }
12852
12853 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12854 are the arguments to the call. */
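/* E.g. __strcat_chk (buf, "", 8) folds to buf itself, and with an
unknown object size __strcat_chk (buf, s, -1) becomes a plain
strcat (buf, s). A known finite SIZE cannot be checked here without
strlen (dest), so those calls are kept.  */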
12855
12856 static tree
12857 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12858 tree src, tree size)
12859 {
12860 tree fn;
12861 const char *p;
12862
12863 if (!validate_arg (dest, POINTER_TYPE)
12864 || !validate_arg (src, POINTER_TYPE)
12865 || !validate_arg (size, INTEGER_TYPE))
12866 return NULL_TREE;
12867
12868 p = c_getstr (src);
12869 /* If the SRC parameter is "", return DEST. */
12870 if (p && *p == '\0')
12871 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12872
12873 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12874 return NULL_TREE;
12875
12876 /* If __builtin_strcat_chk is used, assume strcat is available. */
12877 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12878 if (!fn)
12879 return NULL_TREE;
12880
12881 return build_call_expr_loc (loc, fn, 2, dest, src);
12882 }
12883
12884 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12885 LEN, and SIZE. */
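/* A sketch, with "abc" standing for any string constant:

__strncat_chk (buf, s, 0, 8)      =>  buf
__strncat_chk (buf, "abc", 5, 8)  =>  __strcat_chk (buf, "abc", 8)
since 5 >= strlen ("abc")
__strncat_chk (buf, s, n, -1)     =>  strncat (buf, s, n)  */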
12886
12887 static tree
12888 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12889 tree dest, tree src, tree len, tree size)
12890 {
12891 tree fn;
12892 const char *p;
12893
12894 if (!validate_arg (dest, POINTER_TYPE)
12895 || !validate_arg (src, POINTER_TYPE)
12896 || !validate_arg (size, INTEGER_TYPE)
12897 || !validate_arg (len, INTEGER_TYPE)
12898 return NULL_TREE;
12899
12900 p = c_getstr (src);
12901 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12902 if (p && *p == '\0')
12903 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12904 else if (integer_zerop (len))
12905 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12906
12907 if (! host_integerp (size, 1))
12908 return NULL_TREE;
12909
12910 if (! integer_all_onesp (size))
12911 {
12912 tree src_len = c_strlen (src, 1);
12913 if (src_len
12914 && host_integerp (src_len, 1)
12915 && host_integerp (len, 1)
12916 && ! tree_int_cst_lt (len, src_len))
12917 {
12918 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12919 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12920 if (!fn)
12921 return NULL_TREE;
12922
12923 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12924 }
12925 return NULL_TREE;
12926 }
12927
12928 /* If __builtin_strncat_chk is used, assume strncat is available. */
12929 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12930 if (!fn)
12931 return NULL_TREE;
12932
12933 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12934 }
12935
12936 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12937 Return NULL_TREE if a normal call should be emitted rather than
12938 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12939 or BUILT_IN_VSPRINTF_CHK. */
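/* When the output provably fits in SIZE (or SIZE is -1) and FLAG
permits, the checked call degrades to the plain one; e.g. with an
8-byte destination:

__sprintf_chk (buf, 0, 8, "hi")        =>  sprintf (buf, "hi")
__sprintf_chk (buf, 0, 8, "%s", "hi")  =>  sprintf (buf, "%s", "hi")  */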
12940
12941 static tree
12942 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12943 enum built_in_function fcode)
12944 {
12945 tree dest, size, len, fn, fmt, flag;
12946 const char *fmt_str;
12947
12948 /* Verify the required arguments in the original call. */
12949 if (nargs < 4)
12950 return NULL_TREE;
12951 dest = args[0];
12952 if (!validate_arg (dest, POINTER_TYPE))
12953 return NULL_TREE;
12954 flag = args[1];
12955 if (!validate_arg (flag, INTEGER_TYPE))
12956 return NULL_TREE;
12957 size = args[2];
12958 if (!validate_arg (size, INTEGER_TYPE))
12959 return NULL_TREE;
12960 fmt = args[3];
12961 if (!validate_arg (fmt, POINTER_TYPE))
12962 return NULL_TREE;
12963
12964 if (! host_integerp (size, 1))
12965 return NULL_TREE;
12966
12967 len = NULL_TREE;
12968
12969 if (!init_target_chars ())
12970 return NULL_TREE;
12971
12972 /* Check whether the format is a literal string constant. */
12973 fmt_str = c_getstr (fmt);
12974 if (fmt_str != NULL)
12975 {
12976 /* If the format doesn't contain % args or %%, we know the size. */
12977 if (strchr (fmt_str, target_percent) == 0)
12978 {
12979 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12980 len = build_int_cstu (size_type_node, strlen (fmt_str));
12981 }
12982 /* If the format is "%s" and the first variadic argument is a string literal,
12983 we know the size too. */
12984 else if (fcode == BUILT_IN_SPRINTF_CHK
12985 && strcmp (fmt_str, target_percent_s) == 0)
12986 {
12987 tree arg;
12988
12989 if (nargs == 5)
12990 {
12991 arg = args[4];
12992 if (validate_arg (arg, POINTER_TYPE))
12993 {
12994 len = c_strlen (arg, 1);
12995 if (! len || ! host_integerp (len, 1))
12996 len = NULL_TREE;
12997 }
12998 }
12999 }
13000 }
13001
13002 if (! integer_all_onesp (size))
13003 {
13004 if (! len || ! tree_int_cst_lt (len, size))
13005 return NULL_TREE;
13006 }
13007
13008 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13009 or if format doesn't contain % chars or is "%s". */
13010 if (! integer_zerop (flag))
13011 {
13012 if (fmt_str == NULL)
13013 return NULL_TREE;
13014 if (strchr (fmt_str, target_percent) != NULL
13015 && strcmp (fmt_str, target_percent_s))
13016 return NULL_TREE;
13017 }
13018
13019 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13020 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13021 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13022 if (!fn)
13023 return NULL_TREE;
13024
13025 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13026 }
13027
13028 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13029 a normal call should be emitted rather than expanding the function
13030 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13031
13032 static tree
13033 fold_builtin_sprintf_chk (location_t loc, tree exp,
13034 enum built_in_function fcode)
13035 {
13036 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13037 CALL_EXPR_ARGP (exp), fcode);
13038 }
13039
13040 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13041 NULL_TREE if a normal call should be emitted rather than expanding
13042 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13043 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13044 passed as second argument. */
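/* E.g. __snprintf_chk (buf, 6, 0, 8, "%s", s) becomes
snprintf (buf, 6, "%s", s): the constant length 6 can never overflow
the 8-byte object. With a non-constant length and no usable MAXLEN
bound, the checked call is kept.  */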
13045
13046 static tree
13047 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13048 tree maxlen, enum built_in_function fcode)
13049 {
13050 tree dest, size, len, fn, fmt, flag;
13051 const char *fmt_str;
13052
13053 /* Verify the required arguments in the original call. */
13054 if (nargs < 5)
13055 return NULL_TREE;
13056 dest = args[0];
13057 if (!validate_arg (dest, POINTER_TYPE))
13058 return NULL_TREE;
13059 len = args[1];
13060 if (!validate_arg (len, INTEGER_TYPE))
13061 return NULL_TREE;
13062 flag = args[2];
13063 if (!validate_arg (flag, INTEGER_TYPE))
13064 return NULL_TREE;
13065 size = args[3];
13066 if (!validate_arg (size, INTEGER_TYPE))
13067 return NULL_TREE;
13068 fmt = args[4];
13069 if (!validate_arg (fmt, POINTER_TYPE))
13070 return NULL_TREE;
13071
13072 if (! host_integerp (size, 1))
13073 return NULL_TREE;
13074
13075 if (! integer_all_onesp (size))
13076 {
13077 if (! host_integerp (len, 1))
13078 {
13079 /* If LEN is not constant, try MAXLEN too.
13080 For MAXLEN only allow optimizing into non-_ocs function
13081 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13082 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13083 return NULL_TREE;
13084 }
13085 else
13086 maxlen = len;
13087
13088 if (tree_int_cst_lt (size, maxlen))
13089 return NULL_TREE;
13090 }
13091
13092 if (!init_target_chars ())
13093 return NULL_TREE;
13094
13095 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13096 or if format doesn't contain % chars or is "%s". */
13097 if (! integer_zerop (flag))
13098 {
13099 fmt_str = c_getstr (fmt);
13100 if (fmt_str == NULL)
13101 return NULL_TREE;
13102 if (strchr (fmt_str, target_percent) != NULL
13103 && strcmp (fmt_str, target_percent_s))
13104 return NULL_TREE;
13105 }
13106
13107 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13108 available. */
13109 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13110 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13111 if (!fn)
13112 return NULL_TREE;
13113
13114 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13115 }
13116
13117 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13118 a normal call should be emitted rather than expanding the function
13119 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13120 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13121 passed as second argument. */
13122
13123 tree
13124 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13125 enum built_in_function fcode)
13126 {
13127 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13128 CALL_EXPR_ARGP (exp), maxlen, fcode);
13129 }
13130
13131 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13132 FMT and ARG are the arguments to the call; we don't fold cases with
13133 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13134
13135 Return NULL_TREE if no simplification was possible, otherwise return the
13136 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13137 code of the function to be simplified. */
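/* A sketch of the transformations below, all applied only when the
printf return value is ignored:

printf ("x")           =>  putchar ('x')
printf ("abc\n")       =>  puts ("abc")
printf ("%s\n", s)     =>  puts (s)
printf ("%c", c)       =>  putchar (c)
printf ("%s", "hi\n")  =>  puts ("hi")  */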
13138
13139 static tree
13140 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13141 tree arg, bool ignore,
13142 enum built_in_function fcode)
13143 {
13144 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13145 const char *fmt_str = NULL;
13146
13147 /* If the return value is used, don't do the transformation. */
13148 if (! ignore)
13149 return NULL_TREE;
13150
13151 /* Verify the required arguments in the original call. */
13152 if (!validate_arg (fmt, POINTER_TYPE))
13153 return NULL_TREE;
13154
13155 /* Check whether the format is a literal string constant. */
13156 fmt_str = c_getstr (fmt);
13157 if (fmt_str == NULL)
13158 return NULL_TREE;
13159
13160 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13161 {
13162 /* If we're using an unlocked function, assume the other
13163 unlocked functions exist explicitly. */
13164 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13165 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13166 }
13167 else
13168 {
13169 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13170 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13171 }
13172
13173 if (!init_target_chars ())
13174 return NULL_TREE;
13175
13176 if (strcmp (fmt_str, target_percent_s) == 0
13177 || strchr (fmt_str, target_percent) == NULL)
13178 {
13179 const char *str;
13180
13181 if (strcmp (fmt_str, target_percent_s) == 0)
13182 {
13183 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13184 return NULL_TREE;
13185
13186 if (!arg || !validate_arg (arg, POINTER_TYPE))
13187 return NULL_TREE;
13188
13189 str = c_getstr (arg);
13190 if (str == NULL)
13191 return NULL_TREE;
13192 }
13193 else
13194 {
13195 /* The format specifier doesn't contain any '%' characters. */
13196 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13197 && arg)
13198 return NULL_TREE;
13199 str = fmt_str;
13200 }
13201
13202 /* If the string was "", printf does nothing. */
13203 if (str[0] == '\0')
13204 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13205
13206 /* If the string has length of 1, call putchar. */
13207 if (str[1] == '\0')
13208 {
13209 /* Given printf ("c") (where c is any one character),
13210 convert "c"[0] to an int and pass that to the replacement
13211 function. */
13212 newarg = build_int_cst (integer_type_node, str[0]);
13213 if (fn_putchar)
13214 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13215 }
13216 else
13217 {
13218 /* If the string was "string\n", call puts("string"). */
13219 size_t len = strlen (str);
13220 if ((unsigned char)str[len - 1] == target_newline
13221 && (size_t) (int) len == len
13222 && (int) len > 0)
13223 {
13224 char *newstr;
13225 tree offset_node, string_cst;
13226
13227 /* Create a NUL-terminated string that's one char shorter
13228 than the original, stripping off the trailing '\n'. */
13229 newarg = build_string_literal (len, str);
13230 string_cst = string_constant (newarg, &offset_node);
13231 gcc_checking_assert (string_cst
13232 && (TREE_STRING_LENGTH (string_cst)
13233 == (int) len)
13234 && integer_zerop (offset_node)
13235 && (unsigned char)
13236 TREE_STRING_POINTER (string_cst)[len - 1]
13237 == target_newline);
13238 /* build_string_literal creates a new STRING_CST,
13239 modify it in place to avoid double copying. */
13240 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13241 newstr[len - 1] = '\0';
13242 if (fn_puts)
13243 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13244 }
13245 else
13246 /* We'd like to arrange to call fputs(string,stdout) here,
13247 but we need stdout and don't have a way to get it yet. */
13248 return NULL_TREE;
13249 }
13250 }
13251
13252 /* The other optimizations can be done only on the non-va_list variants. */
13253 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13254 return NULL_TREE;
13255
13256 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13257 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13258 {
13259 if (!arg || !validate_arg (arg, POINTER_TYPE))
13260 return NULL_TREE;
13261 if (fn_puts)
13262 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13263 }
13264
13265 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13266 else if (strcmp (fmt_str, target_percent_c) == 0)
13267 {
13268 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13269 return NULL_TREE;
13270 if (fn_putchar)
13271 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13272 }
13273
13274 if (!call)
13275 return NULL_TREE;
13276
13277 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13278 }
13279
13280 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13281 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13282 more than 3 arguments, and ARG may be null in the 2-argument case.
13283
13284 Return NULL_TREE if no simplification was possible, otherwise return the
13285 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13286 code of the function to be simplified. */
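/* The analogous sketch for the FILE * variants, again only when the
result is unused:

fprintf (fp, "abc")    =>  fputs ("abc", fp)
fprintf (fp, "%s", s)  =>  fputs (s, fp)
fprintf (fp, "%c", c)  =>  fputc (c, fp)  */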
13287
13288 static tree
13289 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13290 tree fmt, tree arg, bool ignore,
13291 enum built_in_function fcode)
13292 {
13293 tree fn_fputc, fn_fputs, call = NULL_TREE;
13294 const char *fmt_str = NULL;
13295
13296 /* If the return value is used, don't do the transformation. */
13297 if (! ignore)
13298 return NULL_TREE;
13299
13300 /* Verify the required arguments in the original call. */
13301 if (!validate_arg (fp, POINTER_TYPE))
13302 return NULL_TREE;
13303 if (!validate_arg (fmt, POINTER_TYPE))
13304 return NULL_TREE;
13305
13306 /* Check whether the format is a literal string constant. */
13307 fmt_str = c_getstr (fmt);
13308 if (fmt_str == NULL)
13309 return NULL_TREE;
13310
13311 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13312 {
13313 /* If we're using an unlocked function, assume the other
13314 unlocked functions exist explicitly. */
13315 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13316 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13317 }
13318 else
13319 {
13320 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13321 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13322 }
13323
13324 if (!init_target_chars ())
13325 return NULL_TREE;
13326
13327 /* If the format doesn't contain % args or %%, use fputs. */
13328 if (strchr (fmt_str, target_percent) == NULL)
13329 {
13330 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13331 && arg)
13332 return NULL_TREE;
13333
13334 /* If the format specifier was "", fprintf does nothing. */
13335 if (fmt_str[0] == '\0')
13336 {
13337 /* If FP has side-effects, just wait until gimplification is
13338 done. */
13339 if (TREE_SIDE_EFFECTS (fp))
13340 return NULL_TREE;
13341
13342 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13343 }
13344
13345 /* When "string" doesn't contain %, replace all cases of
13346 fprintf (fp, string) with fputs (string, fp). The fputs
13347 builtin will take care of special cases like length == 1. */
13348 if (fn_fputs)
13349 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13350 }
13351
13352 /* The other optimizations can be done only on the non-va_list variants. */
13353 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13354 return NULL_TREE;
13355
13356 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13357 else if (strcmp (fmt_str, target_percent_s) == 0)
13358 {
13359 if (!arg || !validate_arg (arg, POINTER_TYPE))
13360 return NULL_TREE;
13361 if (fn_fputs)
13362 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13363 }
13364
13365 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13366 else if (strcmp (fmt_str, target_percent_c) == 0)
13367 {
13368 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13369 return NULL_TREE;
13370 if (fn_fputc)
13371 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13372 }
13373
13374 if (!call)
13375 return NULL_TREE;
13376 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13377 }
13378
13379 /* Initialize format string characters in the target charset. */
13380
13381 static bool
13382 init_target_chars (void)
13383 {
13384 static bool init;
13385 if (!init)
13386 {
13387 target_newline = lang_hooks.to_target_charset ('\n');
13388 target_percent = lang_hooks.to_target_charset ('%');
13389 target_c = lang_hooks.to_target_charset ('c');
13390 target_s = lang_hooks.to_target_charset ('s');
13391 if (target_newline == 0 || target_percent == 0 || target_c == 0
13392 || target_s == 0)
13393 return false;
13394
13395 target_percent_c[0] = target_percent;
13396 target_percent_c[1] = target_c;
13397 target_percent_c[2] = '\0';
13398
13399 target_percent_s[0] = target_percent;
13400 target_percent_s[1] = target_s;
13401 target_percent_s[2] = '\0';
13402
13403 target_percent_s_newline[0] = target_percent;
13404 target_percent_s_newline[1] = target_s;
13405 target_percent_s_newline[2] = target_newline;
13406 target_percent_s_newline[3] = '\0';
13407
13408 init = true;
13409 }
13410 return true;
13411 }
13412
13413 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13414 and no overflow/underflow occurred. INEXACT is true if M was not
13415 exactly calculated. TYPE is the tree type for the result. This
13416 function assumes that you cleared the MPFR flags and then
13417 calculated M to see if anything subsequently set a flag prior to
13418 entering this function. Return NULL_TREE if any checks fail. */
13419
13420 static tree
13421 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13422 {
13423 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13424 overflow/underflow occurred. If -frounding-math, proceed iff the
13425 result of calling FUNC was exact. */
13426 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13427 && (!flag_rounding_math || !inexact))
13428 {
13429 REAL_VALUE_TYPE rr;
13430
13431 real_from_mpfr (&rr, m, type, GMP_RNDN);
13432 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13433 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13434 but the mpfr_t is not, then we underflowed in the
13435 conversion. */
13436 if (real_isfinite (&rr)
13437 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13438 {
13439 REAL_VALUE_TYPE rmode;
13440
13441 real_convert (&rmode, TYPE_MODE (type), &rr);
13442 /* Proceed iff the specified mode can hold the value. */
13443 if (real_identical (&rmode, &rr))
13444 return build_real (type, rmode);
13445 }
13446 }
13447 return NULL_TREE;
13448 }
13449
13450 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13451 number and no overflow/underflow occurred. INEXACT is true if M
13452 was not exactly calculated. TYPE is the tree type for the result.
13453 This function assumes that you cleared the MPFR flags and then
13454 calculated M to see if anything subsequently set a flag prior to
13455 entering this function. Return NULL_TREE if any checks fail;
13456 if FORCE_CONVERT is true, the checks are bypassed. */
13457
13458 static tree
13459 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13460 {
13461 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13462 overflow/underflow occurred. If -frounding-math, proceed iff the
13463 result of calling FUNC was exact. */
13464 if (force_convert
13465 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13466 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13467 && (!flag_rounding_math || !inexact)))
13468 {
13469 REAL_VALUE_TYPE re, im;
13470
13471 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13472 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13473 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13474 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13475 but the mpfr_t is not, then we underflowed in the
13476 conversion. */
13477 if (force_convert
13478 || (real_isfinite (&re) && real_isfinite (&im)
13479 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13480 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13481 {
13482 REAL_VALUE_TYPE re_mode, im_mode;
13483
13484 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13485 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13486 /* Proceed iff the specified mode can hold the value. */
13487 if (force_convert
13488 || (real_identical (&re_mode, &re)
13489 && real_identical (&im_mode, &im)))
13490 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13491 build_real (TREE_TYPE (type), im_mode));
13492 }
13493 }
13494 return NULL_TREE;
13495 }
13496
13497 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13498 FUNC on it and return the resulting value as a tree with type TYPE.
13499 If MIN and/or MAX are not NULL, then the supplied ARG must be
13500 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13501 acceptable values, otherwise they are not. The mpfr precision is
13502 set to the precision of TYPE. We assume that function FUNC returns
13503 zero if the result could be calculated exactly within the requested
13504 precision. */
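/* For example, folding __builtin_sin (1.0) passes mpfr_sin here: the
REAL_CST 1.0 is converted to an mpfr_t, mpfr_sin is evaluated at the
precision of double, and the result is converted back provided it is
finite and representable in the type (and, under -frounding-math,
exact).  */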
13505
13506 static tree
13507 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13508 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13509 bool inclusive)
13510 {
13511 tree result = NULL_TREE;
13512
13513 STRIP_NOPS (arg);
13514
13515 /* To proceed, MPFR must exactly represent the target floating point
13516 format, which only happens when the target base equals two. */
13517 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13518 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13519 {
13520 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13521
13522 if (real_isfinite (ra)
13523 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13524 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13525 {
13526 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13527 const int prec = fmt->p;
13528 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13529 int inexact;
13530 mpfr_t m;
13531
13532 mpfr_init2 (m, prec);
13533 mpfr_from_real (m, ra, GMP_RNDN);
13534 mpfr_clear_flags ();
13535 inexact = func (m, m, rnd);
13536 result = do_mpfr_ckconv (m, type, inexact);
13537 mpfr_clear (m);
13538 }
13539 }
13540
13541 return result;
13542 }
13543
13544 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13545 FUNC on it and return the resulting value as a tree with type TYPE.
13546 The mpfr precision is set to the precision of TYPE. We assume that
13547 function FUNC returns zero if the result could be calculated
13548 exactly within the requested precision. */
13549
13550 static tree
13551 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13552 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13553 {
13554 tree result = NULL_TREE;
13555
13556 STRIP_NOPS (arg1);
13557 STRIP_NOPS (arg2);
13558
13559 /* To proceed, MPFR must exactly represent the target floating point
13560 format, which only happens when the target base equals two. */
13561 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13562 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13563 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13564 {
13565 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13566 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13567
13568 if (real_isfinite (ra1) && real_isfinite (ra2))
13569 {
13570 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13571 const int prec = fmt->p;
13572 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13573 int inexact;
13574 mpfr_t m1, m2;
13575
13576 mpfr_inits2 (prec, m1, m2, NULL);
13577 mpfr_from_real (m1, ra1, GMP_RNDN);
13578 mpfr_from_real (m2, ra2, GMP_RNDN);
13579 mpfr_clear_flags ();
13580 inexact = func (m1, m1, m2, rnd);
13581 result = do_mpfr_ckconv (m1, type, inexact);
13582 mpfr_clears (m1, m2, NULL);
13583 }
13584 }
13585
13586 return result;
13587 }
13588
13589 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13590 FUNC on it and return the resulting value as a tree with type TYPE.
13591 The mpfr precision is set to the precision of TYPE. We assume that
13592 function FUNC returns zero if the result could be calculated
13593 exactly within the requested precision. */
13594
13595 static tree
13596 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13597 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13598 {
13599 tree result = NULL_TREE;
13600
13601 STRIP_NOPS (arg1);
13602 STRIP_NOPS (arg2);
13603 STRIP_NOPS (arg3);
13604
13605 /* To proceed, MPFR must exactly represent the target floating point
13606 format, which only happens when the target base equals two. */
13607 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13608 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13609 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13610 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13611 {
13612 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13613 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13614 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13615
13616 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13617 {
13618 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13619 const int prec = fmt->p;
13620 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13621 int inexact;
13622 mpfr_t m1, m2, m3;
13623
13624 mpfr_inits2 (prec, m1, m2, m3, NULL);
13625 mpfr_from_real (m1, ra1, GMP_RNDN);
13626 mpfr_from_real (m2, ra2, GMP_RNDN);
13627 mpfr_from_real (m3, ra3, GMP_RNDN);
13628 mpfr_clear_flags ();
13629 inexact = func (m1, m1, m2, m3, rnd);
13630 result = do_mpfr_ckconv (m1, type, inexact);
13631 mpfr_clears (m1, m2, m3, NULL);
13632 }
13633 }
13634
13635 return result;
13636 }
13637
13638 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13639 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13640 If ARG_SINP and ARG_COSP are NULL then the result is returned
13641 as a complex value.
13642 The type is taken from the type of ARG and is used for setting the
13643 precision of the calculation and results. */
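/* This serves both entry points: sincos (x, &s, &c) folds to the two
stores of sin (x) and cos (x) combined in a COMPOUND_EXPR, while
cexpi (x) passes NULL pointers and receives cos (x) + sin (x)*I back
as a COMPLEX_CST.  */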
13644
13645 static tree
13646 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13647 {
13648 tree const type = TREE_TYPE (arg);
13649 tree result = NULL_TREE;
13650
13651 STRIP_NOPS (arg);
13652
13653 /* To proceed, MPFR must exactly represent the target floating point
13654 format, which only happens when the target base equals two. */
13655 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13656 && TREE_CODE (arg) == REAL_CST
13657 && !TREE_OVERFLOW (arg))
13658 {
13659 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13660
13661 if (real_isfinite (ra))
13662 {
13663 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13664 const int prec = fmt->p;
13665 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13666 tree result_s, result_c;
13667 int inexact;
13668 mpfr_t m, ms, mc;
13669
13670 mpfr_inits2 (prec, m, ms, mc, NULL);
13671 mpfr_from_real (m, ra, GMP_RNDN);
13672 mpfr_clear_flags ();
13673 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13674 result_s = do_mpfr_ckconv (ms, type, inexact);
13675 result_c = do_mpfr_ckconv (mc, type, inexact);
13676 mpfr_clears (m, ms, mc, NULL);
13677 if (result_s && result_c)
13678 {
13679 /* If we are to return in a complex value do so. */
13680 if (!arg_sinp && !arg_cosp)
13681 return build_complex (build_complex_type (type),
13682 result_c, result_s);
13683
13684 /* Dereference the sin/cos pointer arguments. */
13685 arg_sinp = build_fold_indirect_ref (arg_sinp);
13686 arg_cosp = build_fold_indirect_ref (arg_cosp);
13687 /* Proceed iff valid pointer types were passed in. */
13688 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13689 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13690 {
13691 /* Set the values. */
13692 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13693 result_s);
13694 TREE_SIDE_EFFECTS (result_s) = 1;
13695 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13696 result_c);
13697 TREE_SIDE_EFFECTS (result_c) = 1;
13698 /* Combine the assignments into a compound expr. */
13699 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13700 result_s, result_c));
13701 }
13702 }
13703 }
13704 }
13705 return result;
13706 }
13707
13708 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13709 two-argument mpfr order N Bessel function FUNC on them and return
13710 the resulting value as a tree with type TYPE. The mpfr precision
13711 is set to the precision of TYPE. We assume that function FUNC
13712 returns zero if the result could be calculated exactly within the
13713 requested precision. */
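/* E.g. folding __builtin_jn (2, 1.5) routes mpfr_jn through here with
N == 2. The order must fit in a long, and callers that fold yn
additionally pass a MIN bound, since yn is not defined at or below
zero.  */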
13714 static tree
13715 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13716 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13717 const REAL_VALUE_TYPE *min, bool inclusive)
13718 {
13719 tree result = NULL_TREE;
13720
13721 STRIP_NOPS (arg1);
13722 STRIP_NOPS (arg2);
13723
13724 /* To proceed, MPFR must exactly represent the target floating point
13725 format, which only happens when the target base equals two. */
13726 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13727 && host_integerp (arg1, 0)
13728 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13729 {
13730 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13731 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13732
13733 if (n == (long)n
13734 && real_isfinite (ra)
13735 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13736 {
13737 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13738 const int prec = fmt->p;
13739 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13740 int inexact;
13741 mpfr_t m;
13742
13743 mpfr_init2 (m, prec);
13744 mpfr_from_real (m, ra, GMP_RNDN);
13745 mpfr_clear_flags ();
13746 inexact = func (m, n, m, rnd);
13747 result = do_mpfr_ckconv (m, type, inexact);
13748 mpfr_clear (m);
13749 }
13750 }
13751
13752 return result;
13753 }
13754
13755 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13756 the pointer *(ARG_QUO) and return the result. The type is taken
13757 from the type of ARG0 and is used for setting the precision of the
13758 calculation and results. */
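/* A worked example: remquo (5.0, 3.0, &q) yields 5 - 2*3 = -1.0 with
2 stored in *q; the quotient is rounded to nearest, hence 2 rather
than 1. mpfr_remquo produces both values, and the assignment to *q
is glued to the remainder with a COMPOUND_EXPR.  */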
13759
13760 static tree
13761 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13762 {
13763 tree const type = TREE_TYPE (arg0);
13764 tree result = NULL_TREE;
13765
13766 STRIP_NOPS (arg0);
13767 STRIP_NOPS (arg1);
13768
13769 /* To proceed, MPFR must exactly represent the target floating point
13770 format, which only happens when the target base equals two. */
13771 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13772 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13773 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13774 {
13775 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13776 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13777
13778 if (real_isfinite (ra0) && real_isfinite (ra1))
13779 {
13780 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13781 const int prec = fmt->p;
13782 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13783 tree result_rem;
13784 long integer_quo;
13785 mpfr_t m0, m1;
13786
13787 mpfr_inits2 (prec, m0, m1, NULL);
13788 mpfr_from_real (m0, ra0, GMP_RNDN);
13789 mpfr_from_real (m1, ra1, GMP_RNDN);
13790 mpfr_clear_flags ();
13791 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13792 /* Remquo is independent of the rounding mode, so pass
13793 inexact=0 to do_mpfr_ckconv(). */
13794 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13795 mpfr_clears (m0, m1, NULL);
13796 if (result_rem)
13797 {
13798 /* MPFR calculates quo in the host's long so it may
13799 return more bits in quo than the target int can hold
13800 if sizeof(host long) > sizeof(target int). This can
13801 happen even for native compilers in LP64 mode. In
13802 these cases, reduce the quo value modulo the largest
13803 power of two the target int can hold, leaving one
13804 bit for the sign. */
13805 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13806 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13807
13808 /* Dereference the quo pointer argument. */
13809 arg_quo = build_fold_indirect_ref (arg_quo);
13810 /* Proceed iff a valid pointer type was passed in. */
13811 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13812 {
13813 /* Set the value. */
13814 tree result_quo
13815 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13816 build_int_cst (TREE_TYPE (arg_quo),
13817 integer_quo));
13818 TREE_SIDE_EFFECTS (result_quo) = 1;
13819 /* Combine the quo assignment with the rem. */
13820 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13821 result_quo, result_rem));
13822 }
13823 }
13824 }
13825 }
13826 return result;
13827 }
13828
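/* The folding above, replayed as a standalone host sketch with the
   public MPFR API (double for the target type, 53 for the precision;
   the clamp assumes an LP64 host and a 32-bit target int, mirroring
   the INT_TYPE_SIZE adjustment):  */

#include <mpfr.h>

static void
fold_remquo_example (double x, double y, double *rem, int *quo)
{
  mpfr_t m0, m1;
  long integer_quo;

  mpfr_inits2 (53, m0, m1, (mpfr_ptr) 0);
  mpfr_set_d (m0, x, GMP_RNDN);
  mpfr_set_d (m1, y, GMP_RNDN);
  mpfr_clear_flags ();
  /* The remainder lands in m0, the low quotient bits in integer_quo;
     the remainder is exact, hence the inexact=0 note above.  */
  mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
  *rem = mpfr_get_d (m0, GMP_RNDN);
  /* Clamp the host long to what a 32-bit target int can hold.  */
  integer_quo %= (long) (1UL << 31);
  *quo = (int) integer_quo;
  mpfr_clears (m0, m1, (mpfr_ptr) 0);
}
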
13829 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13830 resulting value as a tree with type TYPE. The mpfr precision is
13831 set to the precision of TYPE. We assume that this mpfr function
13832 returns zero if the result could be calculated exactly within the
13833 requested precision. In addition, the integer pointed to
13834 by ARG_SG will be set to the appropriate signgam value
13835 (-1 or 1), i.e. the sign of Gamma(ARG). */
13836
13837 static tree
13838 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13839 {
13840 tree result = NULL_TREE;
13841
13842 STRIP_NOPS (arg);
13843
13844 /* To proceed, MPFR must exactly represent the target floating point
13845 format, which only happens when the target base equals two. Also
13846 verify ARG is a constant and that ARG_SG is an int pointer. */
13847 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13848 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13849 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13850 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13851 {
13852 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13853
13854 /* In addition to NaN and Inf, the argument cannot be zero or a
13855 negative integer. */
13856 if (real_isfinite (ra)
13857 && ra->cl != rvc_zero
13858 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13859 {
13860 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13861 const int prec = fmt->p;
13862 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13863 int inexact, sg;
13864 mpfr_t m;
13865 tree result_lg;
13866
13867 mpfr_init2 (m, prec);
13868 mpfr_from_real (m, ra, GMP_RNDN);
13869 mpfr_clear_flags ();
13870 inexact = mpfr_lgamma (m, &sg, m, rnd);
13871 result_lg = do_mpfr_ckconv (m, type, inexact);
13872 mpfr_clear (m);
13873 if (result_lg)
13874 {
13875 tree result_sg;
13876
13877 /* Dereference the arg_sg pointer argument. */
13878 arg_sg = build_fold_indirect_ref (arg_sg);
13879 /* Assign the signgam value into *arg_sg. */
13880 result_sg = fold_build2 (MODIFY_EXPR,
13881 TREE_TYPE (arg_sg), arg_sg,
13882 build_int_cst (TREE_TYPE (arg_sg), sg));
13883 TREE_SIDE_EFFECTS (result_sg) = 1;
13884 /* Combine the signgam assignment with the lgamma result. */
13885 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13886 result_sg, result_lg));
13887 }
13888 }
13889 }
13890
13891 return result;
13892 }
13893
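/* A matching standalone sketch: mpfr_lgamma () reports the sign of
   Gamma(x) through an out parameter, which is exactly the value stored
   through the signgam pointer above (host-only, public MPFR API;
   double and 53 again stand in for the target format):  */

#include <mpfr.h>

static int
fold_lgamma_r_example (double x, double *lg, int *signp)
{
  mpfr_t m;
  int inexact, sg;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  *lg = mpfr_get_d (m, GMP_RNDN);
  *signp = sg;				/* -1 or 1, as *arg_sg above */
  mpfr_clear (m);
  return !inexact && !mpfr_overflow_p () && !mpfr_underflow_p ();
}
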
13894 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13895 function FUNC on it and return the resulting value as a tree with
13896 type TYPE. The mpfr precision is set to the precision of TYPE. We
13897 assume that function FUNC returns zero if the result could be
13898 calculated exactly within the requested precision. */
13899
13900 static tree
13901 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13902 {
13903 tree result = NULL_TREE;
13904
13905 STRIP_NOPS (arg);
13906
13907 /* To proceed, MPFR must exactly represent the target floating point
13908 format, which only happens when the target base equals two. */
13909 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13910 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13911 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13912 {
13913 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13914 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13915
13916 if (real_isfinite (re) && real_isfinite (im))
13917 {
13918 const struct real_format *const fmt =
13919 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13920 const int prec = fmt->p;
13921 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13922 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13923 int inexact;
13924 mpc_t m;
13925
13926 mpc_init2 (m, prec);
13927 mpfr_from_real (mpc_realref (m), re, rnd);
13928 mpfr_from_real (mpc_imagref (m), im, rnd);
13929 mpfr_clear_flags ();
13930 inexact = func (m, m, crnd);
13931 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13932 mpc_clear (m);
13933 }
13934 }
13935
13936 return result;
13937 }
13938
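/* Host-side sketch of the same steps with the public MPC API; mpc_sqrt
   serves as a representative FUNC (as csqrt folding passes in), with
   doubles standing in for the target complex type:  */

#include <mpc.h>

static int
fold_csqrt_example (double re, double im, double *rre, double *rim)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, re, im, MPC_RNDNN);
  mpfr_clear_flags ();
  inexact = mpc_sqrt (m, m, MPC_RNDNN);	/* FUNC (m, m, crnd) */
  *rre = mpfr_get_d (mpc_realref (m), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m), GMP_RNDN);
  mpc_clear (m);
  return !inexact;		/* zero iff both parts were exact */
}
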
13939 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13940 mpc function FUNC on them and return the resulting value as a tree
13941 with type TYPE. The mpfr precision is set to the precision of
13942 TYPE. We assume that function FUNC returns zero if the result
13943 could be calculated exactly within the requested precision. If
13944 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13945 in the arguments and/or results. */
13946
13947 tree
13948 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13949 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13950 {
13951 tree result = NULL_TREE;
13952
13953 STRIP_NOPS (arg0);
13954 STRIP_NOPS (arg1);
13955
13956 /* To proceed, MPFR must exactly represent the target floating point
13957 format, which only happens when the target base equals two. */
13958 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13960 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13961 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13962 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13963 {
13964 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13965 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13966 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13967 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13968
13969 if (do_nonfinite
13970 || (real_isfinite (re0) && real_isfinite (im0)
13971 && real_isfinite (re1) && real_isfinite (im1)))
13972 {
13973 const struct real_format *const fmt =
13974 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13975 const int prec = fmt->p;
13976 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13977 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13978 int inexact;
13979 mpc_t m0, m1;
13980
13981 mpc_init2 (m0, prec);
13982 mpc_init2 (m1, prec);
13983 mpfr_from_real (mpc_realref (m0), re0, rnd);
13984 mpfr_from_real (mpc_imagref (m0), im0, rnd);
13985 mpfr_from_real (mpc_realref (m1), re1, rnd);
13986 mpfr_from_real (mpc_imagref (m1), im1, rnd);
13987 mpfr_clear_flags ();
13988 inexact = func (m0, m0, m1, crnd);
13989 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13990 mpc_clear (m0);
13991 mpc_clear (m1);
13992 }
13993 }
13994
13995 return result;
13996 }
13997
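/* And the two-operand counterpart, with mpc_pow as a representative
   FUNC (the function cpow folding passes in); host-only sketch as
   above, with 53 standing in for the target precision:  */

#include <mpc.h>

static int
fold_cpow_example (double re0, double im0, double re1, double im1,
		   double *rre, double *rim)
{
  mpc_t m0, m1;
  int inexact;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, re0, im0, MPC_RNDNN);
  mpc_set_d_d (m1, re1, im1, MPC_RNDNN);
  mpfr_clear_flags ();
  inexact = mpc_pow (m0, m0, m1, MPC_RNDNN);	/* FUNC (m0, m0, m1, crnd) */
  *rre = mpfr_get_d (mpc_realref (m0), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m0), GMP_RNDN);
  mpc_clear (m0);
  mpc_clear (m1);
  return !inexact;
}
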
13998 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13999 a normal call should be emitted rather than expanding the function
14000 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14001
14002 static tree
14003 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14004 {
14005 int nargs = gimple_call_num_args (stmt);
14006
14007 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14008 (nargs > 0
14009 ? gimple_call_arg_ptr (stmt, 0)
14010 : &error_mark_node), fcode);
14011 }
14012
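/* Seen from user code, the fold handled here rewrites a provably safe
   checked call into the unchecked one.  A standalone host program
   (a sketch, compiled separately with GCC) that produces exactly the
   call shape gimple_fold_builtin_sprintf_chk receives:  */

#include <stdio.h>

int
main (void)
{
  char buf[64];

  /* __builtin___sprintf_chk (dst, flag, object-size, format): the
     format contains no '%' and its length is known to fit in BUF, so
     the call may fold to the equivalent of sprintf (buf, "hello").  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			   "hello");
  puts (buf);
  return 0;
}
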
14013 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14014 a normal call should be emitted rather than expanding the function
14015 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14016 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14017 length passed as the second argument. */
14018
14019 tree
14020 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14021 enum built_in_function fcode)
14022 {
14023 int nargs = gimple_call_num_args (stmt);
14024
14025 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14026 (nargs > 0
14027 ? gimple_call_arg_ptr (stmt, 0)
14028 : &error_mark_node), maxlen, fcode);
14029 }
14030
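/* The bounded variant carries the extra MAXLEN operand.  A companion
   host program (again a sketch; the fold applies when the "%s"
   argument's length is known at compile time):  */

#include <stdio.h>

int
main (void)
{
  char buf[8];

  /* __builtin___snprintf_chk (dst, maxlen, flag, object-size, fmt, ...).
     MAXLEN here is the constant bound that reaches the folder above.  */
  __builtin___snprintf_chk (buf, sizeof buf, 0,
			    __builtin_object_size (buf, 0), "%s", "hi");
  puts (buf);
  return 0;
}
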
14031 /* Builtins with folding operations that operate on "..." arguments
14032 need special handling; we need to store the arguments in a convenient
14033 data structure before attempting any folding. Fortunately there are
14034 only a few builtins that fall into this category. FNDECL is the
14035 function, STMT is the GIMPLE call statement, and IGNORE is true if
14036 the result of the function call is ignored. */
14037
14038 static tree
14039 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14040 bool ignore ATTRIBUTE_UNUSED)
14041 {
14042 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14043 tree ret = NULL_TREE;
14044
14045 switch (fcode)
14046 {
14047 case BUILT_IN_SPRINTF_CHK:
14048 case BUILT_IN_VSPRINTF_CHK:
14049 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14050 break;
14051
14052 case BUILT_IN_SNPRINTF_CHK:
14053 case BUILT_IN_VSNPRINTF_CHK:
14054 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14055 break;
14056 default:
14057 break;
14058 }
14059 if (ret)
14060 {
14061 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14062 TREE_NO_WARNING (ret) = 1;
14063 return ret;
14064 }
14065 return NULL_TREE;
14066 }
14067
14068 /* A wrapper function for builtin folding that prevents warnings for
14069 "statement without effect" and the like, caused by removing the
14070 call node before the warning is generated. */
14071
14072 tree
14073 fold_call_stmt (gimple stmt, bool ignore)
14074 {
14075 tree ret = NULL_TREE;
14076 tree fndecl = gimple_call_fndecl (stmt);
14077 location_t loc = gimple_location (stmt);
14078 if (fndecl
14079 && TREE_CODE (fndecl) == FUNCTION_DECL
14080 && DECL_BUILT_IN (fndecl)
14081 && !gimple_call_va_arg_pack_p (stmt))
14082 {
14083 int nargs = gimple_call_num_args (stmt);
14084 tree *args = (nargs > 0
14085 ? gimple_call_arg_ptr (stmt, 0)
14086 : &error_mark_node);
14087
14088 if (avoid_folding_inline_builtin (fndecl))
14089 return NULL_TREE;
14090 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14091 {
14092 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14093 }
14094 else
14095 {
14096 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14097 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14098 if (!ret)
14099 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14100 if (ret)
14101 {
14102 /* Propagate location information from original call to
14103 expansion of builtin. Otherwise things like
14104 maybe_emit_chk_warning, that operate on the expansion
14105 of a builtin, will use the wrong location information. */
14106 if (gimple_has_location (stmt))
14107 {
14108 tree realret = ret;
14109 if (TREE_CODE (ret) == NOP_EXPR)
14110 realret = TREE_OPERAND (ret, 0);
14111 if (CAN_HAVE_LOCATION_P (realret)
14112 && !EXPR_HAS_LOCATION (realret))
14113 SET_EXPR_LOCATION (realret, loc);
14114 return realret;
14115 }
14116 return ret;
14117 }
14118 }
14119 }
14120 return NULL_TREE;
14121 }
14122
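/* A sketch of how a pass of this era might drive fold_call_stmt over a
   basic block.  fold_all_builtin_calls_in_bb is invented for
   illustration; the statement-iterator interface and
   gimplify_and_update_call_from_tree () are the real internal APIs:  */

static void
fold_all_builtin_calls_in_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt))
	{
	  /* Treat the result as ignored when no LHS consumes it.  */
	  tree folded = fold_call_stmt (stmt, !gimple_call_lhs (stmt));
	  if (folded)
	    gimplify_and_update_call_from_tree (&gsi, folded);
	}
    }
}
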
14123 /* Look up the function in the builtin_info table that corresponds to DECL
14124 and set ASMSPEC as its user assembler name. DECL must be a
14125 function decl that declares a builtin. */
14126
14127 void
14128 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14129 {
14130 tree builtin;
14131 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14132 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14133 && asmspec != 0);
14134
14135 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14136 set_user_assembler_name (builtin, asmspec);
14137 switch (DECL_FUNCTION_CODE (decl))
14138 {
14139 case BUILT_IN_MEMCPY:
14140 init_block_move_fn (asmspec);
14141 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14142 break;
14143 case BUILT_IN_MEMSET:
14144 init_block_clear_fn (asmspec);
14145 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14146 break;
14147 case BUILT_IN_MEMMOVE:
14148 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14149 break;
14150 case BUILT_IN_MEMCMP:
14151 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14152 break;
14153 case BUILT_IN_ABORT:
14154 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14155 break;
14156 case BUILT_IN_FFS:
14157 if (INT_TYPE_SIZE < BITS_PER_WORD)
14158 {
14159 set_user_assembler_libfunc ("ffs", asmspec);
14160 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14161 MODE_INT, 0), "ffs");
14162 }
14163 break;
14164 default:
14165 break;
14166 }
14167 }
14168
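/* What reaches this function: a builtin redeclared under a user
   assembler name.  A source-level sketch ("xmemcpy" is an invented
   symbol); after the renaming, both explicit memcpy calls and
   compiler-generated block moves (see init_block_move_fn) are emitted
   as calls to xmemcpy:  */

extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("xmemcpy");
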
14169 /* Return true if DECL is a builtin that expands to a constant or similarly
14170 simple code. */
14171 bool
14172 is_simple_builtin (tree decl)
14173 {
14174 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14175 switch (DECL_FUNCTION_CODE (decl))
14176 {
14177 /* Builtins that expand to constants. */
14178 case BUILT_IN_CONSTANT_P:
14179 case BUILT_IN_EXPECT:
14180 case BUILT_IN_OBJECT_SIZE:
14181 case BUILT_IN_UNREACHABLE:
14182 /* Simple register moves or loads from stack. */
14183 case BUILT_IN_ASSUME_ALIGNED:
14184 case BUILT_IN_RETURN_ADDRESS:
14185 case BUILT_IN_EXTRACT_RETURN_ADDR:
14186 case BUILT_IN_FROB_RETURN_ADDR:
14187 case BUILT_IN_RETURN:
14188 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14189 case BUILT_IN_FRAME_ADDRESS:
14190 case BUILT_IN_VA_END:
14191 case BUILT_IN_STACK_SAVE:
14192 case BUILT_IN_STACK_RESTORE:
14193 /* Exception state returns or moves registers around. */
14194 case BUILT_IN_EH_FILTER:
14195 case BUILT_IN_EH_POINTER:
14196 case BUILT_IN_EH_COPY_VALUES:
14197 return true;
14198
14199 default:
14200 return false;
14201 }
14202
14203 return false;
14204 }
14205
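/* The first group really does reduce to plain constants, e.g. (a
   standalone host sketch, compiled separately):  */

#include <stdio.h>

int
main (void)
{
  char buf[16];

  /* Both builtins below expand to integer constants, which is why
     is_simple_builtin () reports such calls as trivially cheap.  */
  printf ("%d %zu\n",
	  __builtin_constant_p (42),		/* expands to 1 */
	  __builtin_object_size (buf, 0));	/* expands to 16 */
  return 0;
}
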
14206 /* Return true if DECL is a builtin that is not expensive, i.e. one
14207 that is most probably expanded inline into reasonably simple code.
14208 This is a superset of is_simple_builtin. */
14209 bool
14210 is_inexpensive_builtin (tree decl)
14211 {
14212 if (!decl)
14213 return false;
14214 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14215 return true;
14216 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14217 switch (DECL_FUNCTION_CODE (decl))
14218 {
14219 case BUILT_IN_ABS:
14220 case BUILT_IN_ALLOCA:
14221 case BUILT_IN_ALLOCA_WITH_ALIGN:
14222 case BUILT_IN_BSWAP16:
14223 case BUILT_IN_BSWAP32:
14224 case BUILT_IN_BSWAP64:
14225 case BUILT_IN_CLZ:
14226 case BUILT_IN_CLZIMAX:
14227 case BUILT_IN_CLZL:
14228 case BUILT_IN_CLZLL:
14229 case BUILT_IN_CTZ:
14230 case BUILT_IN_CTZIMAX:
14231 case BUILT_IN_CTZL:
14232 case BUILT_IN_CTZLL:
14233 case BUILT_IN_FFS:
14234 case BUILT_IN_FFSIMAX:
14235 case BUILT_IN_FFSL:
14236 case BUILT_IN_FFSLL:
14237 case BUILT_IN_IMAXABS:
14238 case BUILT_IN_FINITE:
14239 case BUILT_IN_FINITEF:
14240 case BUILT_IN_FINITEL:
14241 case BUILT_IN_FINITED32:
14242 case BUILT_IN_FINITED64:
14243 case BUILT_IN_FINITED128:
14244 case BUILT_IN_FPCLASSIFY:
14245 case BUILT_IN_ISFINITE:
14246 case BUILT_IN_ISINF_SIGN:
14247 case BUILT_IN_ISINF:
14248 case BUILT_IN_ISINFF:
14249 case BUILT_IN_ISINFL:
14250 case BUILT_IN_ISINFD32:
14251 case BUILT_IN_ISINFD64:
14252 case BUILT_IN_ISINFD128:
14253 case BUILT_IN_ISNAN:
14254 case BUILT_IN_ISNANF:
14255 case BUILT_IN_ISNANL:
14256 case BUILT_IN_ISNAND32:
14257 case BUILT_IN_ISNAND64:
14258 case BUILT_IN_ISNAND128:
14259 case BUILT_IN_ISNORMAL:
14260 case BUILT_IN_ISGREATER:
14261 case BUILT_IN_ISGREATEREQUAL:
14262 case BUILT_IN_ISLESS:
14263 case BUILT_IN_ISLESSEQUAL:
14264 case BUILT_IN_ISLESSGREATER:
14265 case BUILT_IN_ISUNORDERED:
14266 case BUILT_IN_VA_ARG_PACK:
14267 case BUILT_IN_VA_ARG_PACK_LEN:
14268 case BUILT_IN_VA_COPY:
14269 case BUILT_IN_TRAP:
14270 case BUILT_IN_SAVEREGS:
14271 case BUILT_IN_POPCOUNTL:
14272 case BUILT_IN_POPCOUNTLL:
14273 case BUILT_IN_POPCOUNTIMAX:
14274 case BUILT_IN_POPCOUNT:
14275 case BUILT_IN_PARITYL:
14276 case BUILT_IN_PARITYLL:
14277 case BUILT_IN_PARITYIMAX:
14278 case BUILT_IN_PARITY:
14279 case BUILT_IN_LABS:
14280 case BUILT_IN_LLABS:
14281 case BUILT_IN_PREFETCH:
14282 return true;
14283
14284 default:
14285 return is_simple_builtin (decl);
14286 }
14287
14288 return false;
14289 }
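
/* A sketch of the kind of size/cost heuristic these predicates feed;
   call_cost_example and its weights are invented, but real consumers
   (e.g. the inliner's size estimation) follow the same shape:  */

static int
call_cost_example (gimple stmt)
{
  tree decl = gimple_call_fndecl (stmt);

  if (decl && is_simple_builtin (decl))
    return 0;			/* a constant or a register move */
  else if (decl && is_inexpensive_builtin (decl))
    return 1;			/* a few simple insns when expanded */
  return 10;			/* otherwise assume a real call */
}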