re PR middle-end/50604 (verify_gimple failed: type mismatch in binary expression)
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
53
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
64
65 /* Define the names of the builtin function types and codes. */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* built_in_names maps each built_in_function enum value to its printable
   name.  DEF_BUILTIN is temporarily defined to stringify the enum
   identifier (first macro argument), then builtins.def is included so the
   table entries stay in lockstep with the enum definition.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
75
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
83
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
185
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
195
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
210
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
230
231 /* Return true if NAME starts with __builtin_ or __sync_. */
232
bool
is_builtin_name (const char *name)
{
  /* A name denotes a built-in when it carries one of the two reserved
     prefixes used for compiler-provided functions.  */
  static const char *const builtin_prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof builtin_prefixes / sizeof builtin_prefixes[0]; i++)
    if (strncmp (name, builtin_prefixes[i], strlen (builtin_prefixes[i])) == 0)
      return true;

  return false;
}
242
243
244 /* Return true if DECL is a function symbol representing a built-in. */
245
246 bool
247 is_builtin_fn (tree decl)
248 {
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 }
251
252
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
256
257 static bool
258 called_as_built_in (tree node)
259 {
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
265 }
266
267 /* Compute values M and N such that M divides (address of EXP - N) and
268 such that N < M. Store N in *BITPOSP and return M.
269
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address. */
276
unsigned int
get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  /* A CONST_DECL stands for its initializer; look at that instead.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    {
      if (TREE_CODE (exp) == FUNCTION_DECL)
	{
	  /* Function addresses can encode extra information besides their
	     alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	     allows the low bit to be used as a virtual bit, we know
	     that the address itself must be 2-byte aligned.  */
	  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	    align = 2 * BITS_PER_UNIT;
	  else
	    align = BITS_PER_UNIT;
	}
      else
	align = DECL_ALIGN (exp);
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* The target may over-align constants (e.g. string literals).  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* Dereferencing (ptr & CST): the lowest set bit of the mask
	     (CST & -CST) bounds the alignment the masking guarantees.  */
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  /* Fold the recorded SSA pointer misalignment into bitpos and
	     take the better of the recorded and current alignment.  */
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      /* Account for the constant offset encoded in the MEM_REF itself.  */
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      /* Same base-address analysis as the MEM_REF case above.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  /* index * step contributes at most the alignment implied by
	     the lowest set bit of the step.  */
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
      /* A second, unscaled index can be arbitrarily misaligned.  */
      if (TMR_INDEX2 (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      /* Peel one addend off a PLUS_EXPR chain per iteration.  */
      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* Unanalyzable offset: only byte alignment survives.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  *bitposp = bitpos;
  return align;
}
436
437 /* Return the alignment in bits of EXP, an object. */
438
439 unsigned int
440 get_object_alignment (tree exp)
441 {
442 unsigned HOST_WIDE_INT bitpos = 0;
443 unsigned int align;
444
445 align = get_object_alignment_1 (exp, &bitpos);
446
447 /* align and bitpos now specify known low bits of the pointer.
448 ptr & (align - 1) == bitpos. */
449
450 if (bitpos != 0)
451 align = (bitpos & -bitpos);
452
453 return align;
454 }
455
456 /* Return the alignment in bits of EXP, a pointer valued expression.
457 The alignment returned is, by default, the alignment of the thing that
458 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
459
460 Otherwise, look at the expression to see if we can do better, i.e., if the
461 expression is actually pointing at an object whose alignment is tighter. */
462
463 unsigned int
464 get_pointer_alignment (tree exp)
465 {
466 STRIP_NOPS (exp);
467
468 if (TREE_CODE (exp) == ADDR_EXPR)
469 return get_object_alignment (TREE_OPERAND (exp, 0));
470 else if (TREE_CODE (exp) == SSA_NAME
471 && POINTER_TYPE_P (TREE_TYPE (exp)))
472 {
473 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
474 unsigned align;
475 if (!pi)
476 return BITS_PER_UNIT;
477 if (pi->misalign != 0)
478 align = (pi->misalign & -pi->misalign);
479 else
480 align = pi->align;
481 return align * BITS_PER_UNIT;
482 }
483
484 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
485 }
486
487 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
488 way, because it could contain a zero byte in the middle.
489 TREE_STRING_LENGTH is the size of the character array, not the string.
490
491 ONLY_VALUE should be nonzero if the result is not going to be emitted
492 into the instruction stream and zero if it is going to be expanded.
493 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
494 is returned, otherwise NULL, since
495 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
496 evaluate the side-effects.
497
498 The value returned is of type `ssizetype'.
499
500 Unfortunately, string_constant can't access the values of const char
501 arrays with initializers, so neither can we do so here. */
502
tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* (cond ? a : b): if both arms have the same known length, that is
     the answer.  Only safe when the condition has no side effects or
     the caller promised not to emit the result (ONLY_VALUE).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2): the length is that of e2, under the same side-effect
     restriction as above.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  /* Decompose SRC into a STRING_CST plus an optional byte offset.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the index of the terminating NUL of the full array.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;	/* Too wide for a host word; treat as out of bounds.  */
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
588
589 /* Return a char pointer for a C string if it is a string constant
590 or sum of string constant and integer constant. */
591
592 static const char *
593 c_getstr (tree src)
594 {
595 tree offset_node;
596
597 src = string_constant (src, &offset_node);
598 if (src == 0)
599 return 0;
600
601 if (offset_node == 0)
602 return TREE_STRING_POINTER (src);
603 else if (!host_integerp (offset_node, 1)
604 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
605 return 0;
606
607 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
608 }
609
610 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
611 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
612
static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* Accumulate the target value as two host words: c[0] low, c[1] high.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "still inside the string" flag: it is forced to the
     current character while nonzero, and sticks at 0 once the string's
     terminating NUL has been read, so trailing bytes become zero.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position within the target constant at which
	 source byte I lands, honoring word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	/* Mixed-endian: reverse the byte index within its word.  */
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
642
643 /* Cast a target constant CST to target CHAR and if that value fits into
644 host char type, return zero and put that value into variable pointed to by
645 P. */
646
647 static int
648 target_char_cast (tree cst, char *p)
649 {
650 unsigned HOST_WIDE_INT val, hostval;
651
652 if (TREE_CODE (cst) != INTEGER_CST
653 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
654 return 1;
655
656 val = TREE_INT_CST_LOW (cst);
657 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
658 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
659
660 hostval = val;
661 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
662 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
663
664 if (val != hostval)
665 return 1;
666
667 *p = hostval;
668 return 0;
669 }
670
671 /* Similar to save_expr, but assumes that arbitrary code is not executed
672 in between the multiple evaluations. In particular, we assume that a
673 non-addressable local variable will not be modified. */
674
675 static tree
676 builtin_save_expr (tree exp)
677 {
678 if (TREE_CODE (exp) == SSA_NAME
679 || (TREE_ADDRESSABLE (exp) == 0
680 && (TREE_CODE (exp) == PARM_DECL
681 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
682 return exp;
683
684 return save_expr (exp);
685 }
686
687 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
688 times to get the address of either a higher stack frame, or a return
689 address located within it (depending on FNDECL_CODE). */
690
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* TEM starts at this function's frame address (or the target's
     preferred starting point) and is walked back COUNT frames.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Keep the loaded chain value in a register for the next hop.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address sits one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
770
771 /* Alias set used for setjmp buffer. */
772 static alias_set_type setjmp_alias_set = -1;
773
774 /* Construct the leading half of a __builtin_setjmp call. Control will
775 return to RECEIVER_LABEL. This is also called directly by the SJLJ
776 exception handling code. */
777
778 void
779 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
780 {
781 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
782 rtx stack_save;
783 rtx mem;
784
785 if (setjmp_alias_set == -1)
786 setjmp_alias_set = new_alias_set ();
787
788 buf_addr = convert_memory_address (Pmode, buf_addr);
789
790 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
791
792 /* We store the frame pointer and the address of receiver_label in
793 the buffer and use the rest of it for the stack save area, which
794 is machine-dependent. */
795
796 mem = gen_rtx_MEM (Pmode, buf_addr);
797 set_mem_alias_set (mem, setjmp_alias_set);
798 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
799
800 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
801 set_mem_alias_set (mem, setjmp_alias_set);
802
803 emit_move_insn (validize_mem (mem),
804 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
805
806 stack_save = gen_rtx_MEM (sa_mode,
807 plus_constant (buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (stack_save, setjmp_alias_set);
810 emit_stack_save (SAVE_NONLOCAL, &stack_save);
811
812 /* If there is further processing to do, do it. */
813 #ifdef HAVE_builtin_setjmp_setup
814 if (HAVE_builtin_setjmp_setup)
815 emit_insn (gen_builtin_setjmp_setup (buf_addr));
816 #endif
817
818 /* We have a nonlocal label. */
819 cfun->has_nonlocal_label = 1;
820 }
821
822 /* Construct the trailing part of a __builtin_setjmp call. This is
823 also called directly by the SJLJ exception handling code. */
824
void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used:
	 scan ELIMINABLE_REGS for an (argp -> hard fp) entry.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Let the target emit its own receiver code if it has any;
     otherwise fall back to the nonlocal-goto receiver, if any.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
892
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the address of the buffer written by __builtin_setjmp;
   VALUE must be const1_rtx (enforced below), since that is the value
   the corresponding __builtin_setjmp will appear to return.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* Lazily create the alias set shared by all setjmp buffer accesses.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout is: frame pointer, resume label, then the
	 saved stack pointer, at successive Pmode-sized offsets.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
980
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR; its two arguments must both be POINTER_TYPE
   (checked via validate_arglist, returning NULL_RTX otherwise).
   Always returns const0_rtx on success, since control does not fall
   through a non-local goto.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer first, then the stack
     pointer at the next Pmode-sized slot.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1061
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.

   BUF_ADDR is the buffer's address; the stack-pointer slot lives at
   offset 2 * GET_MODE_SIZE (Pmode), matching the layout used by
   expand_builtin_longjmp above.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
1079
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.

   EXP is the CALL_EXPR.  Argument 0 is the address to prefetch;
   argument 1 (optional, default 0) is the read/write flag and must be
   a constant 0 or 1; argument 2 (optional, default 3) is the locality
   hint and must be a constant 0..3.  Invalid constant values are
   diagnosed and replaced with zero rather than aborting expansion.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1159
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.

   The returned MEM is in BLKmode with alias set 0 (string builtins may
   alias anything) and no size; the MEM_EXPR/MEM_OFFSET attributes are
   derived from EXP where possible, with COMPONENT_REFs stripped unless
   the whole access provably fits within the referenced field.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Recognize &object + positive-constant-offset so the constant can
     be folded into the MEM attributes via OFF below.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Walk down to the innermost COMPONENT_REF, skipping array
	     indexing and conversions.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET_KNOWN_P (mem))
	    offset = MEM_OFFSET (mem);

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Peel COMPONENT_REFs from outermost inward, stopping as
	     soon as the access [offset, offset+length) provably fits
	     in the current field; everything outside the loop exit
	     point is stripped from MEM_EXPR.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      if (offset >= 0)
		set_mem_offset (mem, offset);
	      else
		clear_mem_offset (mem);
	    }
	}
      set_mem_alias_set (mem, 0);
      clear_mem_size (mem);
    }

  return mem;
}
1297 \f
/* Built-in functions to perform an untyped call and return.  */

/* Per-target caches (see struct target_builtins) recording the mode in
   which each hard register is saved/restored by the __builtin_apply
   machinery; initialized by apply_args_size / apply_result_size.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1304
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.

   The block layout is: incoming arg pointer, then (if not passed as an
   invisible first argument) the structure value address, then every
   hard register that can carry a function argument, each aligned to
   its mode's alignment.  The result is computed once and cached in a
   local static.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
1347
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.

   The block contains every hard register that can hold a function
   return value, each aligned to its mode's alignment.  Cached in a
   local static, like apply_args_size.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1387
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs, one per value-returning hard register
   (per apply_result_mode), moving register to memory when SAVEP and
   memory to register otherwise.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the value comes back in the register the
	   callee used, which may differ from the caller-side regno.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1418
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Allocates a stack block (laid out as described in apply_args_size),
   stores the incoming argument registers, the arg pointer, and the
   structure value address into it, and returns the block's address in
   a fresh pseudo register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1479
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.

   Returns the (cached) address of the saved-state block.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1524
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the address of the function to call, ARGUMENTS is the
   address of a block created by __builtin_apply_args, and ARGSIZE is
   the number of bytes of stack arguments to copy.  Returns (in
   ptr_mode) the address of a stack block holding the callee's return
   registers, suitable for __builtin_return.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn(), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1691
/* Perform an untyped return.

   RESULT is the address of a block produced by __builtin_apply; the
   saved return registers are reloaded from it and control jumps
   directly to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect USEs of the restored registers so they stay live
	   up to the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1741
1742 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1743
1744 static enum type_class
1745 type_to_class (tree type)
1746 {
1747 switch (TREE_CODE (type))
1748 {
1749 case VOID_TYPE: return void_type_class;
1750 case INTEGER_TYPE: return integer_type_class;
1751 case ENUMERAL_TYPE: return enumeral_type_class;
1752 case BOOLEAN_TYPE: return boolean_type_class;
1753 case POINTER_TYPE: return pointer_type_class;
1754 case REFERENCE_TYPE: return reference_type_class;
1755 case OFFSET_TYPE: return offset_type_class;
1756 case REAL_TYPE: return real_type_class;
1757 case COMPLEX_TYPE: return complex_type_class;
1758 case FUNCTION_TYPE: return function_type_class;
1759 case METHOD_TYPE: return method_type_class;
1760 case RECORD_TYPE: return record_type_class;
1761 case UNION_TYPE:
1762 case QUAL_UNION_TYPE: return union_type_class;
1763 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1764 ? string_type_class : array_type_class);
1765 case LANG_TYPE: return lang_type_class;
1766 default: return no_type_class;
1767 }
1768 }
1769
1770 /* Expand a call EXP to __builtin_classify_type. */
1771
1772 static rtx
1773 expand_builtin_classify_type (tree exp)
1774 {
1775 if (call_expr_nargs (exp))
1776 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1777 return GEN_INT (no_type_class);
1778 }
1779
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to the
   three case labels and sets FCODE/FCODEF/FCODEL, which are locals of
   the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1793
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  /* Select which decl array to index: implicit decls are the ones the
     compiler may use without a prior user declaration.  */
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the double/float/long-double case
     labels for FN and records the three function codes into
     fcode/fcodef/fcodel (see the macro definitions above).  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Compare against the main variant so qualified variants of the
     standard floating types map to the same builtin.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1909
1910 /* Like mathfn_built_in_1(), but always use the implicit array. */
1911
1912 tree
1913 mathfn_built_in (tree type, enum built_in_function fn)
1914 {
1915 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1916 }
1917
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A value compares equal to
     itself unless it is a NaN, so jumping to LAB on TARGET == TARGET
     skips the errno handling for every non-NaN result.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1960
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab, noting which functions may set
     errno (those expansions must preserve the errno side-effect).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt sets errno only for negative arguments, so the check can
	 be dropped when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  nearbyint differs from
	 rint only in that it never raises the inexact exception, which
	 is irrelevant when -fno-trapping-math.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without NaNs (or with -fno-math-errno) no EDOM check is needed.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.
     The errno check adds code, so skip inline expansion when
     optimizing for size.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      /* Expand the argument before start_sequence so its side-effects
	 are emitted even if the optab sequence below is discarded.  */
      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2081
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     everything else here takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only equivalent to the optab for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
    /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Without NaNs (or with -fno-math-errno) no EDOM check is needed.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The errno check adds code; avoid inline expansion when
     optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list so re-expansion (e.g. by
     expand_errno_check's library fallback) does not repeat
     side-effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  /* Expand the arguments before start_sequence so their side-effects
     are emitted even if the optab sequence below is discarded.  */
  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2190
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* fma is the only ternary math builtin handled here; no errno
     handling is needed since fma never sets errno.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list so re-expanding the call in
     the library fallback below does not repeat side-effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  /* Expand the arguments before start_sequence so their side-effects
     are emitted even if the optab sequence below is discarded.  */
  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2263
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos instruction even for a lone sin or
     cos; the unused result is simply discarded below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      /* Expand the argument before start_sequence so its side-effects
	 are emitted even if the sequence below is discarded.  */
      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* expand_twoval_unop's two output operands are (cos, sin);
	     pass 0 for the one we do not need.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2367
2368 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2369 return an RTL instruction code that implements the functionality.
2370 If that isn't possible or available return CODE_FOR_nothing. */
2371
2372 static enum insn_code
2373 interclass_mathfn_icode (tree arg, tree fndecl)
2374 {
2375 bool errno_set = false;
2376 optab builtin_optab = 0;
2377 enum machine_mode mode;
2378
2379 switch (DECL_FUNCTION_CODE (fndecl))
2380 {
2381 CASE_FLT_FN (BUILT_IN_ILOGB):
2382 errno_set = true; builtin_optab = ilogb_optab; break;
2383 CASE_FLT_FN (BUILT_IN_ISINF):
2384 builtin_optab = isinf_optab; break;
2385 case BUILT_IN_ISNORMAL:
2386 case BUILT_IN_ISFINITE:
2387 CASE_FLT_FN (BUILT_IN_FINITE):
2388 case BUILT_IN_FINITED32:
2389 case BUILT_IN_FINITED64:
2390 case BUILT_IN_FINITED128:
2391 case BUILT_IN_ISINFD32:
2392 case BUILT_IN_ISINFD64:
2393 case BUILT_IN_ISINFD128:
2394 /* These builtins have no optabs (yet). */
2395 break;
2396 default:
2397 gcc_unreachable ();
2398 }
2399
2400 /* There's no easy way to detect the case we need to set EDOM. */
2401 if (flag_errno_math && errno_set)
2402 return CODE_FOR_nothing;
2403
2404 /* Optab mode depends on the mode of the input argument. */
2405 mode = TYPE_MODE (TREE_TYPE (arg));
2406
2407 if (builtin_optab)
2408 return optab_handler (builtin_optab, mode);
2409 return CODE_FOR_nothing;
2410 }
2411
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      /* Remember the unstabilized argument so it can be restored if
	 the insn emission fails below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Emission failed: discard the partial insns and undo the
	 argument stabilization before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2462
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the sin and cos results are stored through the
   second and third (pointer) arguments and const0_rtx is returned.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sinp/cosp pointers; the alias type
     makes the stores alias anything of the argument's type.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  The optab's two
     outputs are (cos, sin), hence target2 before target1 here.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2516
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Three strategies are tried in
   order: a sincos optab, a sincos libcall, and finally a cexp libcall.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  The optab's two outputs are
	 (cos, sin), so op2 receives cos and op1 receives sin.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries to receive the sin and cos results; their
	 addresses are passed to the sincos libcall.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      /* Pick the cexp variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i); build the purely-imaginary
	 complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cexpi(x) = cos(x) + sin(x)*i,
     so cos (op2) is the real part and sin (op1) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2625
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   The call carries source location LOC.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2644
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  /* These builtins are gcc-internal, so a malformed argument list
     indicates a compiler bug rather than bad user code.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab and, in case it is unsupported, the plain
     floating-point rounding function to lower to.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  /* Expand the argument before start_sequence so its side-effects
     are emitted even if the optab sequence below is discarded.  */
  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the libm name matching the builtin's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2780
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  /* These builtins are gcc-internal, so a malformed argument list
     indicates a compiler bug rather than bad user code.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;

    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  /* Expand the argument before start_sequence so its side-effects
     are emitted even if the optab sequence below is discarded.  */
  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2853
2854 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2855 a normal call should be emitted rather than expanding the function
2856 in-line. EXP is the expression that is a call to the builtin
2857 function; if convenient, the result should be placed in TARGET. */
2858
2859 static rtx
2860 expand_builtin_powi (tree exp, rtx target)
2861 {
2862 tree arg0, arg1;
2863 rtx op0, op1;
2864 enum machine_mode mode;
2865 enum machine_mode mode2;
2866
2867 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2868 return NULL_RTX;
2869
2870 arg0 = CALL_EXPR_ARG (exp, 0);
2871 arg1 = CALL_EXPR_ARG (exp, 1);
2872 mode = TYPE_MODE (TREE_TYPE (exp));
2873
2874 /* Emit a libcall to libgcc. */
2875
2876 /* Mode of the 2nd argument must match that of an int. */
2877 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2878
2879 if (target == NULL_RTX)
2880 target = gen_reg_rtx (mode);
2881
2882 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2883 if (GET_MODE (op0) != mode)
2884 op0 = convert_to_mode (mode, op0, 0);
2885 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2886 if (GET_MODE (op1) != mode2)
2887 op1 = convert_to_mode (mode2, op1, 0);
2888
2889 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2890 target, LCT_CONST, mode, 2,
2891 op0, mode, op1, mode2);
2892
2893 return target;
2894 }
2895
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
2899
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Walk from TARGET_MODE through ever-wider integer modes looking
	 for one with a strlen insn pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the address computation in ahead of the strlen insn
	 emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
2999
3000 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3001 bytes from constant string DATA + OFFSET and return it as target
3002 constant. */
3003
3004 static rtx
3005 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3006 enum machine_mode mode)
3007 {
3008 const char *str = (const char *) data;
3009
3010 gcc_assert (offset >= 0
3011 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3012 <= strlen (str) + 1));
3013
3014 return c_readstr (str + offset, mode);
3015 }
3016
3017 /* Expand a call EXP to the memcpy builtin.
3018 Return NULL_RTX if we failed, the caller should emit a normal call,
3019 otherwise try to get the result in TARGET, if convenient (and in
3020 mode MODE if that's convenient). */
3021
static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up profile-based hints about the expected alignment and
	 size of this string operation, when available.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* If the block mover did not hand back the destination address,
	 compute it from DEST_MEM ourselves.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3096
3097 /* Expand a call EXP to the mempcpy builtin.
3098 Return NULL_RTX if we failed; the caller should emit a normal call,
3099 otherwise try to get the result in TARGET, if convenient (and in
3100 mode MODE if that's convenient). If ENDP is 0 return the
3101 destination pointer, if ENDP is 1 return the end pointer ala
3102 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3103 stpcpy. */
3104
3105 static rtx
3106 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3107 {
3108 if (!validate_arglist (exp,
3109 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3110 return NULL_RTX;
3111 else
3112 {
3113 tree dest = CALL_EXPR_ARG (exp, 0);
3114 tree src = CALL_EXPR_ARG (exp, 1);
3115 tree len = CALL_EXPR_ARG (exp, 2);
3116 return expand_builtin_mempcpy_args (dest, src, len,
3117 target, mode, /*endp=*/ 1);
3118 }
3119 }
3120
3121 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3122 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3123 so that this can also be called without constructing an actual CALL_EXPR.
3124 The other arguments and return value are the same as for
3125 expand_builtin_mempcpy. */
3126
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, if the length is a small enough constant, expand
	 the copy as a sequence of move instructions.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Punt: let the caller emit a normal library call.  */
      return NULL_RTX;
    }
}
3197
3198 #ifndef HAVE_movstr
3199 # define HAVE_movstr 0
3200 # define CODE_FOR_movstr CODE_FOR_nothing
3201 #endif
3202
3203 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3204 we failed, the caller should emit a normal call, otherwise try to
3205 get the result in TARGET, if convenient. If ENDP is 0 return the
3206 destination pointer, if ENDP is 1 return the end pointer ala
3207 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3208 stpcpy. */
3209
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Without a movstr insn pattern there is nothing to expand to.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The caller wants the destination pointer back, so pin the
	 destination address in TARGET before expanding the insn.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3247
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
3252
3253 static rtx
3254 expand_builtin_strcpy (tree exp, rtx target)
3255 {
3256 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 {
3258 tree dest = CALL_EXPR_ARG (exp, 0);
3259 tree src = CALL_EXPR_ARG (exp, 1);
3260 return expand_builtin_strcpy_args (dest, src, target);
3261 }
3262 return NULL_RTX;
3263 }
3264
3265 /* Helper function to do the actual work for expand_builtin_strcpy. The
3266 arguments to the builtin_strcpy call DEST and SRC are broken out
3267 so that this can also be called without constructing an actual CALL_EXPR.
3268 The other arguments and return value are the same as for
3269 expand_builtin_strcpy. */
3270
3271 static rtx
3272 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3273 {
3274 return expand_movstr (dest, src, target, /*endp=*/0);
3275 }
3276
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3281
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen (SRC) + 1 bytes with mempcpy semantics; endp == 2
	 makes the result point at the copied NUL terminator.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to a plain strcpy and compute the return
		 value as DST + strlen (SRC) afterwards.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: try the movstr pattern directly.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3352
3353 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3354 bytes from constant string DATA + OFFSET and return it as target
3355 constant. */
3356
3357 rtx
3358 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3359 enum machine_mode mode)
3360 {
3361 const char *str = (const char *) data;
3362
3363 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3364 return const0_rtx;
3365
3366 return c_readstr (str + offset, mode);
3367 }
3368
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3371
static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, the size of the source string
	 including its NUL terminator.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3418
3419 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3420 bytes from constant string DATA + OFFSET and return it as target
3421 constant. */
3422
3423 rtx
3424 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3425 enum machine_mode mode)
3426 {
3427 const char *c = (const char *) data;
3428 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3429
3430 memset (p, *c, GET_MODE_SIZE (mode));
3431
3432 return c_readstr (p, mode);
3433 }
3434
3435 /* Callback routine for store_by_pieces. Return the RTL of a register
3436 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3437 char value given in the RTL register data. For example, if mode is
3438 4 bytes wide, return the RTL for 0x01010101*data. */
3439
3440 static rtx
3441 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3442 enum machine_mode mode)
3443 {
3444 rtx target, coeff;
3445 size_t size;
3446 char *p;
3447
3448 size = GET_MODE_SIZE (mode);
3449 if (size == 1)
3450 return (rtx) data;
3451
3452 p = XALLOCAVEC (char, size);
3453 memset (p, 1, size);
3454 coeff = c_readstr (p, mode);
3455
3456 target = convert_to_mode (mode, (rtx) data, 1);
3457 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3458 return force_reg (mode, target);
3459 }
3460
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
3465
3466 static rtx
3467 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3468 {
3469 if (!validate_arglist (exp,
3470 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3471 return NULL_RTX;
3472 else
3473 {
3474 tree dest = CALL_EXPR_ARG (exp, 0);
3475 tree val = CALL_EXPR_ARG (exp, 1);
3476 tree len = CALL_EXPR_ARG (exp, 2);
3477 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3478 }
3479 }
3480
3481 /* Helper function to do the actual work for expand_builtin_memset. The
3482 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 The other arguments and return value are the same as for
3485 expand_builtin_memset. */
3486
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-based hints about the expected alignment and size
     of this string operation, when available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: VAL is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* VAL is constant; reduce it to a host char, or give up.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2: constant non-zero fill byte.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: zero fill; use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; rebuild the original call (memset or
     bzero, depending on what the user wrote) and expand it as a
     normal call.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3612
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3615
3616 static rtx
3617 expand_builtin_bzero (tree exp)
3618 {
3619 tree dest, size;
3620 location_t loc = EXPR_LOCATION (exp);
3621
3622 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3623 return NULL_RTX;
3624
3625 dest = CALL_EXPR_ARG (exp, 0);
3626 size = CALL_EXPR_ARG (exp, 1);
3627
3628 /* New argument list transforming bzero(ptr x, int y) to
3629 memset(ptr x, int 0, size_t y). This is done this way
3630 so that if it isn't expanded inline, we fallback to
3631 calling bzero instead of memset. */
3632
3633 return expand_builtin_memset_args (dest, integer_zero_node,
3634 fold_convert_loc (loc,
3635 size_type_node, size),
3636 const0_rtx, VOIDmode, exp);
3637 }
3638
3639 /* Expand expression EXP, which is a call to the memcmp built-in function.
3640 Return NULL_RTX if we failed and the caller should emit a normal call,
3641 otherwise try to get the result in TARGET, if convenient (and in mode
3642 MODE, if that's convenient). */
3643
static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* If the pattern generated an insn, emit it; otherwise fall back
       to a library call to memcmp.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3730
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
3734
3735 static rtx
3736 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3737 {
3738 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3739 return NULL_RTX;
3740
3741 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3742 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3743 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3744 {
3745 rtx arg1_rtx, arg2_rtx;
3746 rtx result, insn = NULL_RTX;
3747 tree fndecl, fn;
3748 tree arg1 = CALL_EXPR_ARG (exp, 0);
3749 tree arg2 = CALL_EXPR_ARG (exp, 1);
3750
3751 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3752 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3753
3754 /* If we don't have POINTER_TYPE, call the function. */
3755 if (arg1_align == 0 || arg2_align == 0)
3756 return NULL_RTX;
3757
3758 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3759 arg1 = builtin_save_expr (arg1);
3760 arg2 = builtin_save_expr (arg2);
3761
3762 arg1_rtx = get_memory_rtx (arg1, NULL);
3763 arg2_rtx = get_memory_rtx (arg2, NULL);
3764
3765 #ifdef HAVE_cmpstrsi
3766 /* Try to call cmpstrsi. */
3767 if (HAVE_cmpstrsi)
3768 {
3769 enum machine_mode insn_mode
3770 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3771
3772 /* Make a place to write the result of the instruction. */
3773 result = target;
3774 if (! (result != 0
3775 && REG_P (result) && GET_MODE (result) == insn_mode
3776 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3777 result = gen_reg_rtx (insn_mode);
3778
3779 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3780 GEN_INT (MIN (arg1_align, arg2_align)));
3781 }
3782 #endif
3783 #ifdef HAVE_cmpstrnsi
3784 /* Try to determine at least one length and call cmpstrnsi. */
3785 if (!insn && HAVE_cmpstrnsi)
3786 {
3787 tree len;
3788 rtx arg3_rtx;
3789
3790 enum machine_mode insn_mode
3791 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3792 tree len1 = c_strlen (arg1, 1);
3793 tree len2 = c_strlen (arg2, 1);
3794
3795 if (len1)
3796 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3797 if (len2)
3798 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3799
3800 /* If we don't have a constant length for the first, use the length
3801 of the second, if we know it. We don't require a constant for
3802 this case; some cost analysis could be done if both are available
3803 but neither is constant. For now, assume they're equally cheap,
3804 unless one has side effects. If both strings have constant lengths,
3805 use the smaller. */
3806
3807 if (!len1)
3808 len = len2;
3809 else if (!len2)
3810 len = len1;
3811 else if (TREE_SIDE_EFFECTS (len1))
3812 len = len2;
3813 else if (TREE_SIDE_EFFECTS (len2))
3814 len = len1;
3815 else if (TREE_CODE (len1) != INTEGER_CST)
3816 len = len2;
3817 else if (TREE_CODE (len2) != INTEGER_CST)
3818 len = len1;
3819 else if (tree_int_cst_lt (len1, len2))
3820 len = len1;
3821 else
3822 len = len2;
3823
3824 /* If both arguments have side effects, we cannot optimize. */
3825 if (!len || TREE_SIDE_EFFECTS (len))
3826 goto do_libcall;
3827
3828 arg3_rtx = expand_normal (len);
3829
3830 /* Make a place to write the result of the instruction. */
3831 result = target;
3832 if (! (result != 0
3833 && REG_P (result) && GET_MODE (result) == insn_mode
3834 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3835 result = gen_reg_rtx (insn_mode);
3836
3837 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3838 GEN_INT (MIN (arg1_align, arg2_align)));
3839 }
3840 #endif
3841
3842 if (insn)
3843 {
3844 enum machine_mode mode;
3845 emit_insn (insn);
3846
3847 /* Return the value in the proper mode for this function. */
3848 mode = TYPE_MODE (TREE_TYPE (exp));
3849 if (GET_MODE (result) == mode)
3850 return result;
3851 if (target == 0)
3852 return convert_to_mode (mode, result, 0);
3853 convert_move (target, result, 0);
3854 return target;
3855 }
3856
3857 /* Expand the library call ourselves using a stabilized argument
3858 list to avoid re-evaluating the function's arguments twice. */
3859 #ifdef HAVE_cmpstrnsi
3860 do_libcall:
3861 #endif
3862 fndecl = get_callee_fndecl (exp);
3863 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3864 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3865 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3866 return expand_call (fn, target, target == const0_rtx);
3867 }
3868 #endif
3869 return NULL_RTX;
3870 }
3871
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Add one to each known length to cover the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse TARGET
	 only if it is already a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      /* Preserve the tail-call flag so the libcall can still be a sibcall.  */
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3992
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Collect the register-save insns in a private sequence so they can be
     emitted at function entry rather than at the call site.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4029
4030 /* Expand a call to __builtin_next_arg. */
4031
4032 static rtx
4033 expand_builtin_next_arg (void)
4034 {
4035 /* Checking arguments is already done in fold_builtin_next_arg
4036 that must be called before this function. */
4037 return expand_binop (ptr_mode, add_optab,
4038 crtl->args.internal_arg_pointer,
4039 crtl->args.arg_offset_rtx,
4040 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4041 }
4042
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      /* Scalar (e.g. pointer) va_list: hand the backend an lvalue in the
	 form of a MEM_REF of the stabilized address.  */
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4092
4093 /* The "standard" definition of va_list is void*. */
4094
4095 tree
4096 std_build_builtin_va_list (void)
4097 {
4098 return ptr_type_node;
4099 }
4100
4101 /* The "standard" abi va_list is va_list_type_node. */
4102
4103 tree
4104 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4105 {
4106 return va_list_type_node;
4107 }
4108
/* The "standard" type of va_list is va_list_type_node.  Return it if
   TYPE matches (possibly after decay/indirection), else NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: the argument may arrive as
     *va_list or as a pointer to a pointer-valued va_list.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't cause a mismatch.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4143
4144 /* The "standard" implementation of va_start: just assign `nextarg' to
4145 the variable. */
4146
4147 void
4148 std_expand_builtin_va_start (tree valist, rtx nextarg)
4149 {
4150 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4151 convert_move (va_r, nextarg, 0);
4152 }
4153
4154 /* Expand EXP, a call to __builtin_va_start. */
4155
4156 static rtx
4157 expand_builtin_va_start (tree exp)
4158 {
4159 rtx nextarg;
4160 tree valist;
4161 location_t loc = EXPR_LOCATION (exp);
4162
4163 if (call_expr_nargs (exp) < 2)
4164 {
4165 error_at (loc, "too few arguments to function %<va_start%>");
4166 return const0_rtx;
4167 }
4168
4169 if (fold_builtin_next_arg (exp, true))
4170 return const0_rtx;
4171
4172 nextarg = expand_builtin_next_arg ();
4173 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4174
4175 if (targetm.expand_builtin_va_start)
4176 targetm.expand_builtin_va_start (valist, nextarg);
4177 else
4178 std_expand_builtin_va_start (valist, nextarg);
4179
4180 return const0_rtx;
4181 }
4182
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched through one extra level
     of indirection.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, ADDR holds a pointer to the pointer;
     dereference once more to reach the actual object.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4279
4280 /* Build an indirect-ref expression over the given TREE, which represents a
4281 piece of a va_arg() expansion. */
4282 tree
4283 build_va_arg_indirect_ref (tree addr)
4284 {
4285 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4286
4287 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4288 mf_mark (addr);
4289
4290 return addr;
4291 }
4292
4293 /* Return a dummy expression of type TYPE in order to keep going after an
4294 error. */
4295
4296 static tree
4297 dummy_object (tree type)
4298 {
4299 tree t = build_int_cst (build_pointer_type (type), 0);
4300 return build2 (MEM_REF, type, t, t);
4301 }
4302
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Only give the promotion hint once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4395
4396 /* Expand EXP, a call to __builtin_va_end. */
4397
4398 static rtx
4399 expand_builtin_va_end (tree exp)
4400 {
4401 tree valist = CALL_EXPR_ARG (exp, 0);
4402
4403 /* Evaluate for side effects, if needed. I hate macros that don't
4404 do that. */
4405 if (TREE_SIDE_EFFECTS (valist))
4406 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4407
4408 return const0_rtx;
4409 }
4410
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4462
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy anything that is neither a
	 register nor a constant into a pseudo first.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4509
4510 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4511 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4512 is the same as for allocate_dynamic_stack_space. */
4513
4514 static rtx
4515 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4516 {
4517 rtx op0;
4518 rtx result;
4519
4520 /* Emit normal call if marked not-inlineable. */
4521 if (CALL_CANNOT_INLINE_P (exp))
4522 return NULL_RTX;
4523
4524 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4525 return NULL_RTX;
4526
4527 /* Compute the argument. */
4528 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4529
4530 /* Allocate the desired space. */
4531 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4532 cannot_accumulate);
4533 result = convert_memory_address (ptr_mode, result);
4534
4535 return result;
4536 }
4537
/* Expand EXP, a call to a bswap builtin.  The result is computed in the
   mode of the argument; try to place it in TARGET, using SUBTARGET for
   the operand if convenient.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  /* bswap should always be expandable; expand_unop must not fail here.  */
  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
4561
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  Use SUBTARGET only when its mode matches the
     argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  Note the unsignedp
     flag is false only for clrsb.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4590
4591 /* Expand a call to __builtin_expect. We just return our argument
4592 as the builtin_expect semantic should've been already executed by
4593 tree branch prediction pass. */
4594
4595 static rtx
4596 expand_builtin_expect (tree exp, rtx target)
4597 {
4598 tree arg;
4599
4600 if (call_expr_nargs (exp) < 2)
4601 return const0_rtx;
4602 arg = CALL_EXPR_ARG (exp, 0);
4603
4604 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4605 /* When guessing was done, the hints should be already stripped away. */
4606 gcc_assert (!flag_guess_branch_prob
4607 || optimize == 0 || seen_error ());
4608 return target;
4609 }
4610
4611 /* Expand a call to __builtin_assume_aligned. We just return our first
4612 argument as the builtin_assume_aligned semantic should've been already
4613 executed by CCP. */
4614
4615 static rtx
4616 expand_builtin_assume_aligned (tree exp, rtx target)
4617 {
4618 if (call_expr_nargs (exp) < 2)
4619 return const0_rtx;
4620 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4621 EXPAND_NORMAL);
4622 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4623 && (call_expr_nargs (exp) < 3
4624 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4625 return target;
4626 }
4627
/* Expand a call to __builtin_trap: use the target's trap instruction if
   it has one, otherwise call abort, and emit a barrier in either case.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  /* Control does not continue past a trap.  */
  emit_barrier ();
}
4639
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4650
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Rewrite the argument in the CALL_EXPR itself with the stabilized
     form, so a later fallback call won't re-evaluate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4673
4674 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4675 Return NULL is a normal call should be emitted rather than expanding the
4676 function inline. If convenient, the result should be placed in TARGET.
4677 SUBTARGET may be used as the target for computing the operand. */
4678
4679 static rtx
4680 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4681 {
4682 rtx op0, op1;
4683 tree arg;
4684
4685 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4686 return NULL_RTX;
4687
4688 arg = CALL_EXPR_ARG (exp, 0);
4689 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4690
4691 arg = CALL_EXPR_ARG (exp, 1);
4692 op1 = expand_normal (arg);
4693
4694 return expand_copysign (op0, op1, target);
4695 }
4696
4697 /* Create a new constant string literal and return a char* pointer to it.
4698 The STRING_CST value is the LEN characters at STR. */
4699 tree
4700 build_string_literal (int len, const char *str)
4701 {
4702 tree t, elem, index, type;
4703
4704 t = build_string (len, str);
4705 elem = build_type_variant (char_type_node, 1, 0);
4706 index = build_index_type (size_int (len - 1));
4707 type = build_array_type (elem, index);
4708 TREE_TYPE (t) = type;
4709 TREE_CONSTANT (t) = 1;
4710 TREE_READONLY (t) = 1;
4711 TREE_STATIC (t) = 1;
4712
4713 type = build_pointer_type (elem);
4714 t = build1 (ADDR_EXPR, type,
4715 build4 (ARRAY_REF, elem,
4716 t, integer_zero_node, NULL_TREE, NULL_TREE));
4717 return t;
4718 }
4719
/* Expand a call to __builtin___clear_cache.  Which of the three
   configurations applies is decided at preprocessing time by
   HAVE_clear_cache and CLEAR_INSN_CACHE.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      /* Emit the clear_cache insn if the operands match its predicates;
	 on failure the builtin degrades to a no-op.  */
      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4768
4769 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4770
4771 static rtx
4772 round_trampoline_addr (rtx tramp)
4773 {
4774 rtx temp, addend, mask;
4775
4776 /* If we don't need too much alignment, we'll have been guaranteed
4777 proper alignment by get_trampoline_type. */
4778 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4779 return tramp;
4780
4781 /* Round address up to desired boundary. */
4782 temp = gen_reg_rtx (Pmode);
4783 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4784 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4785
4786 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4787 temp, 0, OPTAB_LIB_WIDEN);
4788 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4789 temp, 0, OPTAB_LIB_WIDEN);
4790
4791 return tramp;
4792 }
4793
/* Expand a call to __builtin_init_trampoline.  EXP is the CALL_EXPR with
   three pointer arguments: the trampoline storage, the address of the
   nested function, and the static chain value.  Emits the target-specific
   initialization insns and returns const0_rtx, or NULL_RTX if the
   argument list is invalid.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If the address had to be rounded up for alignment, rebase the
     BLKmode MEM on the aligned address and record the stronger
     alignment and the known trampoline size.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;

  warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
	      "trampoline generated for nested function %qD", t_func);

  return const0_rtx;
}
4845
4846 static rtx
4847 expand_builtin_adjust_trampoline (tree exp)
4848 {
4849 rtx tramp;
4850
4851 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4852 return NULL_RTX;
4853
4854 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4855 tramp = round_trampoline_addr (tramp);
4856 if (targetm.calls.trampoline_adjust_address)
4857 tramp = targetm.calls.trampoline_adjust_address (tramp);
4858
4859 return tramp;
4860 }
4861
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));	/* Mode of the FP argument.  */
  rmode = TYPE_MODE (TREE_TYPE (exp));	/* Mode of the integer result.  */
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard anything it emitted.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in a single word: view it as an integer of the
	 same size and extract the sign bit from that.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: operate only on the word that holds the
	 sign bit, and rebase BITPOS within that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4972
4973 /* Expand fork or exec calls. TARGET is the desired target of the
4974 call. EXP is the call. FN is the
4975 identificator of the actual function. IGNORE is nonzero if the
4976 value is to be ignored. */
4977
4978 static rtx
4979 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4980 {
4981 tree id, decl;
4982 tree call;
4983
4984 /* If we are not profiling, just call the function. */
4985 if (!profile_arc_flag)
4986 return NULL_RTX;
4987
4988 /* Otherwise call the wrapper. This should be equivalent for the rest of
4989 compiler, so the code does not diverge, and the wrapper may run the
4990 code necessary for keeping the profiling sane. */
4991
4992 switch (DECL_FUNCTION_CODE (fn))
4993 {
4994 case BUILT_IN_FORK:
4995 id = get_identifier ("__gcov_fork");
4996 break;
4997
4998 case BUILT_IN_EXECL:
4999 id = get_identifier ("__gcov_execl");
5000 break;
5001
5002 case BUILT_IN_EXECV:
5003 id = get_identifier ("__gcov_execv");
5004 break;
5005
5006 case BUILT_IN_EXECLP:
5007 id = get_identifier ("__gcov_execlp");
5008 break;
5009
5010 case BUILT_IN_EXECLE:
5011 id = get_identifier ("__gcov_execle");
5012 break;
5013
5014 case BUILT_IN_EXECVP:
5015 id = get_identifier ("__gcov_execvp");
5016 break;
5017
5018 case BUILT_IN_EXECVE:
5019 id = get_identifier ("__gcov_execve");
5020 break;
5021
5022 default:
5023 gcc_unreachable ();
5024 }
5025
5026 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5027 FUNCTION_DECL, id, TREE_TYPE (fn));
5028 DECL_EXTERNAL (decl) = 1;
5029 TREE_PUBLIC (decl) = 1;
5030 DECL_ARTIFICIAL (decl) = 1;
5031 TREE_NOTHROW (decl) = 1;
5032 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5033 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5034 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5035 return expand_call (call, target, ignore);
5036 }
5037
5038
5039 \f
5040 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5041 the pointer in these functions is void*, the tree optimizers may remove
5042 casts. The mode computed in expand_builtin isn't reliable either, due
5043 to __sync_bool_compare_and_swap.
5044
5045 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5046 group of builtins. This gives us log2 of the mode size. */
5047
5048 static inline enum machine_mode
5049 get_builtin_sync_mode (int fcode_diff)
5050 {
5051 /* The size is not negotiable, so ask not to get BLKmode in return
5052 if the target indicates that a smaller size would be better. */
5053 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5054 }
5055
5056 /* Expand the memory expression LOC and return the appropriate memory operand
5057 for the builtin_sync operations. */
5058
5059 static rtx
5060 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5061 {
5062 rtx addr, mem;
5063
5064 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5065 addr = convert_memory_address (Pmode, addr);
5066
5067 /* Note that we explicitly do not want any alias information for this
5068 memory, so that we kill all other live memories. Otherwise we don't
5069 satisfy the full barrier semantics of the intrinsic. */
5070 mem = validize_mem (gen_rtx_MEM (mode, addr));
5071
5072 /* The alignment needs to be at least according to that of the mode. */
5073 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5074 get_pointer_alignment (loc)));
5075 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5076 MEM_VOLATILE_P (mem) = 1;
5077
5078 return mem;
5079 }
5080
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; when -Wsync-nand is
     active, issue a once-per-compilation informational note.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-static so each note is emitted at most once.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  /* When the result is unused, the plain atomic operation suffices;
     otherwise emit the fetch variant (AFTER selects which value).  */
  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5156
5157 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5158 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5159 true if this is the boolean form. TARGET is a place for us to store the
5160 results; this is NOT optional if IS_BOOL is true. */
5161
5162 static rtx
5163 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5164 bool is_bool, rtx target)
5165 {
5166 rtx old_val, new_val, mem;
5167 enum machine_mode old_mode;
5168
5169 /* Expand the operands. */
5170 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5171
5172
5173 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5174 mode, EXPAND_NORMAL);
5175 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5176 of CONST_INTs, where we know the old_mode only from the call argument. */
5177 old_mode = GET_MODE (old_val);
5178 if (old_mode == VOIDmode)
5179 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5180 old_val = convert_modes (mode, old_mode, old_val, 1);
5181
5182 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5183 mode, EXPAND_NORMAL);
5184 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5185 of CONST_INTs, where we know the old_mode only from the call argument. */
5186 old_mode = GET_MODE (new_val);
5187 if (old_mode == VOIDmode)
5188 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5189 new_val = convert_modes (mode, old_mode, new_val, 1);
5190
5191 if (is_bool)
5192 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5193 else
5194 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5195 }
5196
5197 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5198 general form is actually an atomic exchange, and some targets only
5199 support a reduced form with the second argument being a constant 1.
5200 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5201 the results. */
5202
5203 static rtx
5204 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5205 rtx target)
5206 {
5207 rtx val, mem;
5208 enum machine_mode old_mode;
5209
5210 /* Expand the operands. */
5211 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5212 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5213 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5214 of CONST_INTs, where we know the old_mode only from the call argument. */
5215 old_mode = GET_MODE (val);
5216 if (old_mode == VOIDmode)
5217 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5218 val = convert_modes (mode, old_mode, val, 1);
5219
5220 return expand_sync_lock_test_and_set (mem, val, target);
5221 }
5222
/* Expand the __sync_synchronize intrinsic.  Emits a full memory barrier
   using, in order of preference: the target's memory_barrier insn, the
   synchronize libfunc, or a volatile asm with a "memory" clobber.  */

static void
expand_builtin_sync_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  /* Mark the asm volatile so it is not removed or moved.  */
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5254
5255 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5256
5257 static void
5258 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5259 {
5260 struct expand_operand ops[2];
5261 enum insn_code icode;
5262 rtx mem;
5263
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266
5267 /* If there is an explicit operation in the md file, use it. */
5268 icode = direct_optab_handler (sync_lock_release_optab, mode);
5269 if (icode != CODE_FOR_nothing)
5270 {
5271 create_fixed_operand (&ops[0], mem);
5272 create_input_operand (&ops[1], const0_rtx, mode);
5273 if (maybe_expand_insn (icode, 2, ops))
5274 return;
5275 }
5276
5277 /* Otherwise we can implement this operation by emitting a barrier
5278 followed by a store of zero. */
5279 expand_builtin_sync_synchronize ();
5280 emit_move_insn (mem, const0_rtx);
5281 }
5282 \f
5283 /* Expand an expression EXP that calls a built-in function,
5284 with result going to TARGET if that's convenient
5285 (and in mode MODE if that's convenient).
5286 SUBTARGET may be used as the target for computing one of EXP's operands.
5287 IGNORE is nonzero if the value is to be ignored. */
5288
5289 rtx
5290 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5291 int ignore)
5292 {
5293 tree fndecl = get_callee_fndecl (exp);
5294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5295 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5296 int flags;
5297
5298 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5299 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5300
5301 /* When not optimizing, generate calls to library functions for a certain
5302 set of builtins. */
5303 if (!optimize
5304 && !called_as_built_in (fndecl)
5305 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5306 && fcode != BUILT_IN_ALLOCA
5307 && fcode != BUILT_IN_FREE)
5308 return expand_call (exp, target, ignore);
5309
5310 /* The built-in function expanders test for target == const0_rtx
5311 to determine whether the function's result will be ignored. */
5312 if (ignore)
5313 target = const0_rtx;
5314
5315 /* If the result of a pure or const built-in function is ignored, and
5316 none of its arguments are volatile, we can avoid expanding the
5317 built-in call and just evaluate the arguments for side-effects. */
5318 if (target == const0_rtx
5319 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5320 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5321 {
5322 bool volatilep = false;
5323 tree arg;
5324 call_expr_arg_iterator iter;
5325
5326 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5327 if (TREE_THIS_VOLATILE (arg))
5328 {
5329 volatilep = true;
5330 break;
5331 }
5332
5333 if (! volatilep)
5334 {
5335 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5336 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5337 return const0_rtx;
5338 }
5339 }
5340
5341 switch (fcode)
5342 {
5343 CASE_FLT_FN (BUILT_IN_FABS):
5344 target = expand_builtin_fabs (exp, target, subtarget);
5345 if (target)
5346 return target;
5347 break;
5348
5349 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5350 target = expand_builtin_copysign (exp, target, subtarget);
5351 if (target)
5352 return target;
5353 break;
5354
5355 /* Just do a normal library call if we were unable to fold
5356 the values. */
5357 CASE_FLT_FN (BUILT_IN_CABS):
5358 break;
5359
5360 CASE_FLT_FN (BUILT_IN_EXP):
5361 CASE_FLT_FN (BUILT_IN_EXP10):
5362 CASE_FLT_FN (BUILT_IN_POW10):
5363 CASE_FLT_FN (BUILT_IN_EXP2):
5364 CASE_FLT_FN (BUILT_IN_EXPM1):
5365 CASE_FLT_FN (BUILT_IN_LOGB):
5366 CASE_FLT_FN (BUILT_IN_LOG):
5367 CASE_FLT_FN (BUILT_IN_LOG10):
5368 CASE_FLT_FN (BUILT_IN_LOG2):
5369 CASE_FLT_FN (BUILT_IN_LOG1P):
5370 CASE_FLT_FN (BUILT_IN_TAN):
5371 CASE_FLT_FN (BUILT_IN_ASIN):
5372 CASE_FLT_FN (BUILT_IN_ACOS):
5373 CASE_FLT_FN (BUILT_IN_ATAN):
5374 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5375 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5376 because of possible accuracy problems. */
5377 if (! flag_unsafe_math_optimizations)
5378 break;
5379 CASE_FLT_FN (BUILT_IN_SQRT):
5380 CASE_FLT_FN (BUILT_IN_FLOOR):
5381 CASE_FLT_FN (BUILT_IN_CEIL):
5382 CASE_FLT_FN (BUILT_IN_TRUNC):
5383 CASE_FLT_FN (BUILT_IN_ROUND):
5384 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5385 CASE_FLT_FN (BUILT_IN_RINT):
5386 target = expand_builtin_mathfn (exp, target, subtarget);
5387 if (target)
5388 return target;
5389 break;
5390
5391 CASE_FLT_FN (BUILT_IN_FMA):
5392 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5393 if (target)
5394 return target;
5395 break;
5396
5397 CASE_FLT_FN (BUILT_IN_ILOGB):
5398 if (! flag_unsafe_math_optimizations)
5399 break;
5400 CASE_FLT_FN (BUILT_IN_ISINF):
5401 CASE_FLT_FN (BUILT_IN_FINITE):
5402 case BUILT_IN_ISFINITE:
5403 case BUILT_IN_ISNORMAL:
5404 target = expand_builtin_interclass_mathfn (exp, target);
5405 if (target)
5406 return target;
5407 break;
5408
5409 CASE_FLT_FN (BUILT_IN_ICEIL):
5410 CASE_FLT_FN (BUILT_IN_LCEIL):
5411 CASE_FLT_FN (BUILT_IN_LLCEIL):
5412 CASE_FLT_FN (BUILT_IN_LFLOOR):
5413 CASE_FLT_FN (BUILT_IN_IFLOOR):
5414 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5415 target = expand_builtin_int_roundingfn (exp, target);
5416 if (target)
5417 return target;
5418 break;
5419
5420 CASE_FLT_FN (BUILT_IN_IRINT):
5421 CASE_FLT_FN (BUILT_IN_LRINT):
5422 CASE_FLT_FN (BUILT_IN_LLRINT):
5423 CASE_FLT_FN (BUILT_IN_IROUND):
5424 CASE_FLT_FN (BUILT_IN_LROUND):
5425 CASE_FLT_FN (BUILT_IN_LLROUND):
5426 target = expand_builtin_int_roundingfn_2 (exp, target);
5427 if (target)
5428 return target;
5429 break;
5430
5431 CASE_FLT_FN (BUILT_IN_POWI):
5432 target = expand_builtin_powi (exp, target);
5433 if (target)
5434 return target;
5435 break;
5436
5437 CASE_FLT_FN (BUILT_IN_ATAN2):
5438 CASE_FLT_FN (BUILT_IN_LDEXP):
5439 CASE_FLT_FN (BUILT_IN_SCALB):
5440 CASE_FLT_FN (BUILT_IN_SCALBN):
5441 CASE_FLT_FN (BUILT_IN_SCALBLN):
5442 if (! flag_unsafe_math_optimizations)
5443 break;
5444
5445 CASE_FLT_FN (BUILT_IN_FMOD):
5446 CASE_FLT_FN (BUILT_IN_REMAINDER):
5447 CASE_FLT_FN (BUILT_IN_DREM):
5448 CASE_FLT_FN (BUILT_IN_POW):
5449 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5450 if (target)
5451 return target;
5452 break;
5453
5454 CASE_FLT_FN (BUILT_IN_CEXPI):
5455 target = expand_builtin_cexpi (exp, target);
5456 gcc_assert (target);
5457 return target;
5458
5459 CASE_FLT_FN (BUILT_IN_SIN):
5460 CASE_FLT_FN (BUILT_IN_COS):
5461 if (! flag_unsafe_math_optimizations)
5462 break;
5463 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5464 if (target)
5465 return target;
5466 break;
5467
5468 CASE_FLT_FN (BUILT_IN_SINCOS):
5469 if (! flag_unsafe_math_optimizations)
5470 break;
5471 target = expand_builtin_sincos (exp);
5472 if (target)
5473 return target;
5474 break;
5475
5476 case BUILT_IN_APPLY_ARGS:
5477 return expand_builtin_apply_args ();
5478
5479 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5480 FUNCTION with a copy of the parameters described by
5481 ARGUMENTS, and ARGSIZE. It returns a block of memory
5482 allocated on the stack into which is stored all the registers
5483 that might possibly be used for returning the result of a
5484 function. ARGUMENTS is the value returned by
5485 __builtin_apply_args. ARGSIZE is the number of bytes of
5486 arguments that must be copied. ??? How should this value be
5487 computed? We'll also need a safe worst case value for varargs
5488 functions. */
5489 case BUILT_IN_APPLY:
5490 if (!validate_arglist (exp, POINTER_TYPE,
5491 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5492 && !validate_arglist (exp, REFERENCE_TYPE,
5493 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5494 return const0_rtx;
5495 else
5496 {
5497 rtx ops[3];
5498
5499 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5500 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5501 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5502
5503 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5504 }
5505
5506 /* __builtin_return (RESULT) causes the function to return the
5507 value described by RESULT. RESULT is address of the block of
5508 memory returned by __builtin_apply. */
5509 case BUILT_IN_RETURN:
5510 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5511 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5512 return const0_rtx;
5513
5514 case BUILT_IN_SAVEREGS:
5515 return expand_builtin_saveregs ();
5516
5517 case BUILT_IN_VA_ARG_PACK:
5518 /* All valid uses of __builtin_va_arg_pack () are removed during
5519 inlining. */
5520 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5521 return const0_rtx;
5522
5523 case BUILT_IN_VA_ARG_PACK_LEN:
5524 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5525 inlining. */
5526 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5527 return const0_rtx;
5528
5529 /* Return the address of the first anonymous stack arg. */
5530 case BUILT_IN_NEXT_ARG:
5531 if (fold_builtin_next_arg (exp, false))
5532 return const0_rtx;
5533 return expand_builtin_next_arg ();
5534
5535 case BUILT_IN_CLEAR_CACHE:
5536 target = expand_builtin___clear_cache (exp);
5537 if (target)
5538 return target;
5539 break;
5540
5541 case BUILT_IN_CLASSIFY_TYPE:
5542 return expand_builtin_classify_type (exp);
5543
5544 case BUILT_IN_CONSTANT_P:
5545 return const0_rtx;
5546
5547 case BUILT_IN_FRAME_ADDRESS:
5548 case BUILT_IN_RETURN_ADDRESS:
5549 return expand_builtin_frame_address (fndecl, exp);
5550
5551 /* Returns the address of the area where the structure is returned.
5552 0 otherwise. */
5553 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5554 if (call_expr_nargs (exp) != 0
5555 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5556 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5557 return const0_rtx;
5558 else
5559 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5560
5561 case BUILT_IN_ALLOCA:
5562 /* If the allocation stems from the declaration of a variable-sized
5563 object, it cannot accumulate. */
5564 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5565 if (target)
5566 return target;
5567 break;
5568
5569 case BUILT_IN_STACK_SAVE:
5570 return expand_stack_save ();
5571
5572 case BUILT_IN_STACK_RESTORE:
5573 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5574 return const0_rtx;
5575
5576 case BUILT_IN_BSWAP32:
5577 case BUILT_IN_BSWAP64:
5578 target = expand_builtin_bswap (exp, target, subtarget);
5579
5580 if (target)
5581 return target;
5582 break;
5583
5584 CASE_INT_FN (BUILT_IN_FFS):
5585 case BUILT_IN_FFSIMAX:
5586 target = expand_builtin_unop (target_mode, exp, target,
5587 subtarget, ffs_optab);
5588 if (target)
5589 return target;
5590 break;
5591
5592 CASE_INT_FN (BUILT_IN_CLZ):
5593 case BUILT_IN_CLZIMAX:
5594 target = expand_builtin_unop (target_mode, exp, target,
5595 subtarget, clz_optab);
5596 if (target)
5597 return target;
5598 break;
5599
5600 CASE_INT_FN (BUILT_IN_CTZ):
5601 case BUILT_IN_CTZIMAX:
5602 target = expand_builtin_unop (target_mode, exp, target,
5603 subtarget, ctz_optab);
5604 if (target)
5605 return target;
5606 break;
5607
5608 CASE_INT_FN (BUILT_IN_CLRSB):
5609 case BUILT_IN_CLRSBIMAX:
5610 target = expand_builtin_unop (target_mode, exp, target,
5611 subtarget, clrsb_optab);
5612 if (target)
5613 return target;
5614 break;
5615
5616 CASE_INT_FN (BUILT_IN_POPCOUNT):
5617 case BUILT_IN_POPCOUNTIMAX:
5618 target = expand_builtin_unop (target_mode, exp, target,
5619 subtarget, popcount_optab);
5620 if (target)
5621 return target;
5622 break;
5623
5624 CASE_INT_FN (BUILT_IN_PARITY):
5625 case BUILT_IN_PARITYIMAX:
5626 target = expand_builtin_unop (target_mode, exp, target,
5627 subtarget, parity_optab);
5628 if (target)
5629 return target;
5630 break;
5631
5632 case BUILT_IN_STRLEN:
5633 target = expand_builtin_strlen (exp, target, target_mode);
5634 if (target)
5635 return target;
5636 break;
5637
5638 case BUILT_IN_STRCPY:
5639 target = expand_builtin_strcpy (exp, target);
5640 if (target)
5641 return target;
5642 break;
5643
5644 case BUILT_IN_STRNCPY:
5645 target = expand_builtin_strncpy (exp, target);
5646 if (target)
5647 return target;
5648 break;
5649
5650 case BUILT_IN_STPCPY:
5651 target = expand_builtin_stpcpy (exp, target, mode);
5652 if (target)
5653 return target;
5654 break;
5655
5656 case BUILT_IN_MEMCPY:
5657 target = expand_builtin_memcpy (exp, target);
5658 if (target)
5659 return target;
5660 break;
5661
5662 case BUILT_IN_MEMPCPY:
5663 target = expand_builtin_mempcpy (exp, target, mode);
5664 if (target)
5665 return target;
5666 break;
5667
5668 case BUILT_IN_MEMSET:
5669 target = expand_builtin_memset (exp, target, mode);
5670 if (target)
5671 return target;
5672 break;
5673
5674 case BUILT_IN_BZERO:
5675 target = expand_builtin_bzero (exp);
5676 if (target)
5677 return target;
5678 break;
5679
5680 case BUILT_IN_STRCMP:
5681 target = expand_builtin_strcmp (exp, target);
5682 if (target)
5683 return target;
5684 break;
5685
5686 case BUILT_IN_STRNCMP:
5687 target = expand_builtin_strncmp (exp, target, mode);
5688 if (target)
5689 return target;
5690 break;
5691
5692 case BUILT_IN_BCMP:
5693 case BUILT_IN_MEMCMP:
5694 target = expand_builtin_memcmp (exp, target, mode);
5695 if (target)
5696 return target;
5697 break;
5698
5699 case BUILT_IN_SETJMP:
5700 /* This should have been lowered to the builtins below. */
5701 gcc_unreachable ();
5702
5703 case BUILT_IN_SETJMP_SETUP:
5704 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5705 and the receiver label. */
5706 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5707 {
5708 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5709 VOIDmode, EXPAND_NORMAL);
5710 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5711 rtx label_r = label_rtx (label);
5712
5713 /* This is copied from the handling of non-local gotos. */
5714 expand_builtin_setjmp_setup (buf_addr, label_r);
5715 nonlocal_goto_handler_labels
5716 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5717 nonlocal_goto_handler_labels);
5718 /* ??? Do not let expand_label treat us as such since we would
5719 not want to be both on the list of non-local labels and on
5720 the list of forced labels. */
5721 FORCED_LABEL (label) = 0;
5722 return const0_rtx;
5723 }
5724 break;
5725
5726 case BUILT_IN_SETJMP_DISPATCHER:
5727 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5728 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5729 {
5730 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5731 rtx label_r = label_rtx (label);
5732
5733 /* Remove the dispatcher label from the list of non-local labels
5734 since the receiver labels have been added to it above. */
5735 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5736 return const0_rtx;
5737 }
5738 break;
5739
5740 case BUILT_IN_SETJMP_RECEIVER:
5741 /* __builtin_setjmp_receiver is passed the receiver label. */
5742 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 {
5744 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5745 rtx label_r = label_rtx (label);
5746
5747 expand_builtin_setjmp_receiver (label_r);
5748 return const0_rtx;
5749 }
5750 break;
5751
5752 /* __builtin_longjmp is passed a pointer to an array of five words.
5753 It's similar to the C library longjmp function but works with
5754 __builtin_setjmp above. */
5755 case BUILT_IN_LONGJMP:
5756 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5757 {
5758 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5759 VOIDmode, EXPAND_NORMAL);
5760 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5761
5762 if (value != const1_rtx)
5763 {
5764 error ("%<__builtin_longjmp%> second argument must be 1");
5765 return const0_rtx;
5766 }
5767
5768 expand_builtin_longjmp (buf_addr, value);
5769 return const0_rtx;
5770 }
5771 break;
5772
5773 case BUILT_IN_NONLOCAL_GOTO:
5774 target = expand_builtin_nonlocal_goto (exp);
5775 if (target)
5776 return target;
5777 break;
5778
5779 /* This updates the setjmp buffer that is its argument with the value
5780 of the current stack pointer. */
5781 case BUILT_IN_UPDATE_SETJMP_BUF:
5782 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5783 {
5784 rtx buf_addr
5785 = expand_normal (CALL_EXPR_ARG (exp, 0));
5786
5787 expand_builtin_update_setjmp_buf (buf_addr);
5788 return const0_rtx;
5789 }
5790 break;
5791
5792 case BUILT_IN_TRAP:
5793 expand_builtin_trap ();
5794 return const0_rtx;
5795
5796 case BUILT_IN_UNREACHABLE:
5797 expand_builtin_unreachable ();
5798 return const0_rtx;
5799
5800 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5801 case BUILT_IN_SIGNBITD32:
5802 case BUILT_IN_SIGNBITD64:
5803 case BUILT_IN_SIGNBITD128:
5804 target = expand_builtin_signbit (exp, target);
5805 if (target)
5806 return target;
5807 break;
5808
5809 /* Various hooks for the DWARF 2 __throw routine. */
5810 case BUILT_IN_UNWIND_INIT:
5811 expand_builtin_unwind_init ();
5812 return const0_rtx;
5813 case BUILT_IN_DWARF_CFA:
5814 return virtual_cfa_rtx;
5815 #ifdef DWARF2_UNWIND_INFO
5816 case BUILT_IN_DWARF_SP_COLUMN:
5817 return expand_builtin_dwarf_sp_column ();
5818 case BUILT_IN_INIT_DWARF_REG_SIZES:
5819 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5820 return const0_rtx;
5821 #endif
5822 case BUILT_IN_FROB_RETURN_ADDR:
5823 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5824 case BUILT_IN_EXTRACT_RETURN_ADDR:
5825 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5826 case BUILT_IN_EH_RETURN:
5827 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5828 CALL_EXPR_ARG (exp, 1));
5829 return const0_rtx;
5830 #ifdef EH_RETURN_DATA_REGNO
5831 case BUILT_IN_EH_RETURN_DATA_REGNO:
5832 return expand_builtin_eh_return_data_regno (exp);
5833 #endif
5834 case BUILT_IN_EXTEND_POINTER:
5835 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5836 case BUILT_IN_EH_POINTER:
5837 return expand_builtin_eh_pointer (exp);
5838 case BUILT_IN_EH_FILTER:
5839 return expand_builtin_eh_filter (exp);
5840 case BUILT_IN_EH_COPY_VALUES:
5841 return expand_builtin_eh_copy_values (exp);
5842
5843 case BUILT_IN_VA_START:
5844 return expand_builtin_va_start (exp);
5845 case BUILT_IN_VA_END:
5846 return expand_builtin_va_end (exp);
5847 case BUILT_IN_VA_COPY:
5848 return expand_builtin_va_copy (exp);
5849 case BUILT_IN_EXPECT:
5850 return expand_builtin_expect (exp, target);
5851 case BUILT_IN_ASSUME_ALIGNED:
5852 return expand_builtin_assume_aligned (exp, target);
5853 case BUILT_IN_PREFETCH:
5854 expand_builtin_prefetch (exp);
5855 return const0_rtx;
5856
5857 case BUILT_IN_INIT_TRAMPOLINE:
5858 return expand_builtin_init_trampoline (exp);
5859 case BUILT_IN_ADJUST_TRAMPOLINE:
5860 return expand_builtin_adjust_trampoline (exp);
5861
5862 case BUILT_IN_FORK:
5863 case BUILT_IN_EXECL:
5864 case BUILT_IN_EXECV:
5865 case BUILT_IN_EXECLP:
5866 case BUILT_IN_EXECLE:
5867 case BUILT_IN_EXECVP:
5868 case BUILT_IN_EXECVE:
5869 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5870 if (target)
5871 return target;
5872 break;
5873
5874 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5875 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5876 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5877 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5878 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5879 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5880 target = expand_builtin_sync_operation (mode, exp, PLUS,
5881 false, target, ignore);
5882 if (target)
5883 return target;
5884 break;
5885
5886 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5887 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5888 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5889 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5890 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5891 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5892 target = expand_builtin_sync_operation (mode, exp, MINUS,
5893 false, target, ignore);
5894 if (target)
5895 return target;
5896 break;
5897
5898 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5899 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5900 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5901 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5902 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5903 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5904 target = expand_builtin_sync_operation (mode, exp, IOR,
5905 false, target, ignore);
5906 if (target)
5907 return target;
5908 break;
5909
5910 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5911 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5912 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5913 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5914 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5915 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5916 target = expand_builtin_sync_operation (mode, exp, AND,
5917 false, target, ignore);
5918 if (target)
5919 return target;
5920 break;
5921
5922 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5923 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5924 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5925 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5926 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5927 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5928 target = expand_builtin_sync_operation (mode, exp, XOR,
5929 false, target, ignore);
5930 if (target)
5931 return target;
5932 break;
5933
5934 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5935 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5936 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5937 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5938 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5939 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5940 target = expand_builtin_sync_operation (mode, exp, NOT,
5941 false, target, ignore);
5942 if (target)
5943 return target;
5944 break;
5945
5946 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5947 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5948 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5949 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5950 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5951 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5952 target = expand_builtin_sync_operation (mode, exp, PLUS,
5953 true, target, ignore);
5954 if (target)
5955 return target;
5956 break;
5957
5958 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5959 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5960 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5961 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5962 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5963 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5964 target = expand_builtin_sync_operation (mode, exp, MINUS,
5965 true, target, ignore);
5966 if (target)
5967 return target;
5968 break;
5969
5970 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5971 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5972 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5973 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5974 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5975 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5976 target = expand_builtin_sync_operation (mode, exp, IOR,
5977 true, target, ignore);
5978 if (target)
5979 return target;
5980 break;
5981
5982 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5983 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5984 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5985 case BUILT_IN_SYNC_AND_AND_FETCH_8:
5986 case BUILT_IN_SYNC_AND_AND_FETCH_16:
5987 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
5988 target = expand_builtin_sync_operation (mode, exp, AND,
5989 true, target, ignore);
5990 if (target)
5991 return target;
5992 break;
5993
5994 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
5995 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
5996 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
5997 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
5998 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
5999 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6000 target = expand_builtin_sync_operation (mode, exp, XOR,
6001 true, target, ignore);
6002 if (target)
6003 return target;
6004 break;
6005
6006 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6007 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6008 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6009 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6010 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6012 target = expand_builtin_sync_operation (mode, exp, NOT,
6013 true, target, ignore);
6014 if (target)
6015 return target;
6016 break;
6017
6018 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6019 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6020 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6021 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6022 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6023 if (mode == VOIDmode)
6024 mode = TYPE_MODE (boolean_type_node);
6025 if (!target || !register_operand (target, mode))
6026 target = gen_reg_rtx (mode);
6027
6028 mode = get_builtin_sync_mode
6029 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6030 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6031 if (target)
6032 return target;
6033 break;
6034
6035 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6036 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6037 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6038 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6039 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6040 mode = get_builtin_sync_mode
6041 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6042 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6043 if (target)
6044 return target;
6045 break;
6046
6047 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6048 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6049 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6050 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6051 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6052 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6053 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6054 if (target)
6055 return target;
6056 break;
6057
6058 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6059 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6060 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6061 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6062 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6063 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6064 expand_builtin_sync_lock_release (mode, exp);
6065 return const0_rtx;
6066
6067 case BUILT_IN_SYNC_SYNCHRONIZE:
6068 expand_builtin_sync_synchronize ();
6069 return const0_rtx;
6070
6071 case BUILT_IN_OBJECT_SIZE:
6072 return expand_builtin_object_size (exp);
6073
6074 case BUILT_IN_MEMCPY_CHK:
6075 case BUILT_IN_MEMPCPY_CHK:
6076 case BUILT_IN_MEMMOVE_CHK:
6077 case BUILT_IN_MEMSET_CHK:
6078 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6079 if (target)
6080 return target;
6081 break;
6082
6083 case BUILT_IN_STRCPY_CHK:
6084 case BUILT_IN_STPCPY_CHK:
6085 case BUILT_IN_STRNCPY_CHK:
6086 case BUILT_IN_STRCAT_CHK:
6087 case BUILT_IN_STRNCAT_CHK:
6088 case BUILT_IN_SNPRINTF_CHK:
6089 case BUILT_IN_VSNPRINTF_CHK:
6090 maybe_emit_chk_warning (exp, fcode);
6091 break;
6092
6093 case BUILT_IN_SPRINTF_CHK:
6094 case BUILT_IN_VSPRINTF_CHK:
6095 maybe_emit_sprintf_chk_warning (exp, fcode);
6096 break;
6097
6098 case BUILT_IN_FREE:
6099 if (warn_free_nonheap_object)
6100 maybe_emit_free_warning (exp);
6101 break;
6102
6103 default: /* just do library call, if unknown builtin */
6104 break;
6105 }
6106
6107 /* The switch statement above can drop through to cause the function
6108 to be called normally. */
6109 return expand_call (exp, target, ignore);
6110 }
6111
6112 /* Determine whether a tree node represents a call to a built-in
6113 function. If the tree T is a call to a built-in function with
6114 the right number of arguments of the appropriate types, return
6115 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6116 Otherwise the return value is END_BUILTINS. */
6117
6118 enum built_in_function
6119 builtin_mathfn_code (const_tree t)
6120 {
6121 const_tree fndecl, arg, parmlist;
6122 const_tree argtype, parmtype;
6123 const_call_expr_arg_iterator iter;
6124
6125 if (TREE_CODE (t) != CALL_EXPR
6126 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6127 return END_BUILTINS;
6128
6129 fndecl = get_callee_fndecl (t);
6130 if (fndecl == NULL_TREE
6131 || TREE_CODE (fndecl) != FUNCTION_DECL
6132 || ! DECL_BUILT_IN (fndecl)
6133 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6134 return END_BUILTINS;
6135
6136 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6137 init_const_call_expr_arg_iterator (t, &iter);
6138 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6139 {
6140 /* If a function doesn't take a variable number of arguments,
6141 the last element in the list will have type `void'. */
6142 parmtype = TREE_VALUE (parmlist);
6143 if (VOID_TYPE_P (parmtype))
6144 {
6145 if (more_const_call_expr_args_p (&iter))
6146 return END_BUILTINS;
6147 return DECL_FUNCTION_CODE (fndecl);
6148 }
6149
6150 if (! more_const_call_expr_args_p (&iter))
6151 return END_BUILTINS;
6152
6153 arg = next_const_call_expr_arg (&iter);
6154 argtype = TREE_TYPE (arg);
6155
6156 if (SCALAR_FLOAT_TYPE_P (parmtype))
6157 {
6158 if (! SCALAR_FLOAT_TYPE_P (argtype))
6159 return END_BUILTINS;
6160 }
6161 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6162 {
6163 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6164 return END_BUILTINS;
6165 }
6166 else if (POINTER_TYPE_P (parmtype))
6167 {
6168 if (! POINTER_TYPE_P (argtype))
6169 return END_BUILTINS;
6170 }
6171 else if (INTEGRAL_TYPE_P (parmtype))
6172 {
6173 if (! INTEGRAL_TYPE_P (argtype))
6174 return END_BUILTINS;
6175 }
6176 else
6177 return END_BUILTINS;
6178 }
6179
6180 /* Variable-length argument list. */
6181 return DECL_FUNCTION_CODE (fndecl);
6182 }
6183
6184 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6185 evaluate to a constant. */
6186
6187 static tree
6188 fold_builtin_constant_p (tree arg)
6189 {
6190 /* We return 1 for a numeric type that's known to be a constant
6191 value at compile-time or for an aggregate type that's a
6192 literal constant. */
6193 STRIP_NOPS (arg);
6194
6195 /* If we know this is a constant, emit the constant of one. */
6196 if (CONSTANT_CLASS_P (arg)
6197 || (TREE_CODE (arg) == CONSTRUCTOR
6198 && TREE_CONSTANT (arg)))
6199 return integer_one_node;
6200 if (TREE_CODE (arg) == ADDR_EXPR)
6201 {
6202 tree op = TREE_OPERAND (arg, 0);
6203 if (TREE_CODE (op) == STRING_CST
6204 || (TREE_CODE (op) == ARRAY_REF
6205 && integer_zerop (TREE_OPERAND (op, 1))
6206 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6207 return integer_one_node;
6208 }
6209
6210 /* If this expression has side effects, show we don't know it to be a
6211 constant. Likewise if it's a pointer or aggregate type since in
6212 those case we only want literals, since those are only optimized
6213 when generating RTL, not later.
6214 And finally, if we are compiling an initializer, not code, we
6215 need to return a definite result now; there's not going to be any
6216 more optimization done. */
6217 if (TREE_SIDE_EFFECTS (arg)
6218 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6219 || POINTER_TYPE_P (TREE_TYPE (arg))
6220 || cfun == 0
6221 || folding_initializer)
6222 return integer_zero_node;
6223
6224 return NULL_TREE;
6225 }
6226
6227 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6228 return it as a truthvalue. */
6229
6230 static tree
6231 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6232 {
6233 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6234
6235 fn = built_in_decls[BUILT_IN_EXPECT];
6236 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6237 ret_type = TREE_TYPE (TREE_TYPE (fn));
6238 pred_type = TREE_VALUE (arg_types);
6239 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6240
6241 pred = fold_convert_loc (loc, pred_type, pred);
6242 expected = fold_convert_loc (loc, expected_type, expected);
6243 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6244
6245 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6246 build_int_cst (ret_type, 0));
6247 }
6248
6249 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6250 NULL_TREE if no simplification is possible. */
6251
6252 static tree
6253 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6254 {
6255 tree inner, fndecl, inner_arg0;
6256 enum tree_code code;
6257
6258 /* Distribute the expected value over short-circuiting operators.
6259 See through the cast from truthvalue_type_node to long. */
6260 inner_arg0 = arg0;
6261 while (TREE_CODE (inner_arg0) == NOP_EXPR
6262 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6263 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6264 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6265
6266 /* If this is a builtin_expect within a builtin_expect keep the
6267 inner one. See through a comparison against a constant. It
6268 might have been added to create a thruthvalue. */
6269 inner = inner_arg0;
6270
6271 if (COMPARISON_CLASS_P (inner)
6272 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6273 inner = TREE_OPERAND (inner, 0);
6274
6275 if (TREE_CODE (inner) == CALL_EXPR
6276 && (fndecl = get_callee_fndecl (inner))
6277 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6278 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6279 return arg0;
6280
6281 inner = inner_arg0;
6282 code = TREE_CODE (inner);
6283 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6284 {
6285 tree op0 = TREE_OPERAND (inner, 0);
6286 tree op1 = TREE_OPERAND (inner, 1);
6287
6288 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6289 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6290 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6291
6292 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6293 }
6294
6295 /* If the argument isn't invariant then there's nothing else we can do. */
6296 if (!TREE_CONSTANT (inner_arg0))
6297 return NULL_TREE;
6298
6299 /* If we expect that a comparison against the argument will fold to
6300 a constant return the constant. In practice, this means a true
6301 constant or the address of a non-weak symbol. */
6302 inner = inner_arg0;
6303 STRIP_NOPS (inner);
6304 if (TREE_CODE (inner) == ADDR_EXPR)
6305 {
6306 do
6307 {
6308 inner = TREE_OPERAND (inner, 0);
6309 }
6310 while (TREE_CODE (inner) == COMPONENT_REF
6311 || TREE_CODE (inner) == ARRAY_REF);
6312 if ((TREE_CODE (inner) == VAR_DECL
6313 || TREE_CODE (inner) == FUNCTION_DECL)
6314 && DECL_WEAK (inner))
6315 return NULL_TREE;
6316 }
6317
6318 /* Otherwise, ARG0 already has the proper type for the return value. */
6319 return arg0;
6320 }
6321
6322 /* Fold a call to __builtin_classify_type with argument ARG. */
6323
6324 static tree
6325 fold_builtin_classify_type (tree arg)
6326 {
6327 if (arg == 0)
6328 return build_int_cst (integer_type_node, no_type_class);
6329
6330 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6331 }
6332
6333 /* Fold a call to __builtin_strlen with argument ARG. */
6334
6335 static tree
6336 fold_builtin_strlen (location_t loc, tree type, tree arg)
6337 {
6338 if (!validate_arg (arg, POINTER_TYPE))
6339 return NULL_TREE;
6340 else
6341 {
6342 tree len = c_strlen (arg, 0);
6343
6344 if (len)
6345 return fold_convert_loc (loc, type, len);
6346
6347 return NULL_TREE;
6348 }
6349 }
6350
6351 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6352
6353 static tree
6354 fold_builtin_inf (location_t loc, tree type, int warn)
6355 {
6356 REAL_VALUE_TYPE real;
6357
6358 /* __builtin_inff is intended to be usable to define INFINITY on all
6359 targets. If an infinity is not available, INFINITY expands "to a
6360 positive constant of type float that overflows at translation
6361 time", footnote "In this case, using INFINITY will violate the
6362 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6363 Thus we pedwarn to ensure this constraint violation is
6364 diagnosed. */
6365 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6366 pedwarn (loc, 0, "target format does not support infinity");
6367
6368 real_inf (&real);
6369 return build_real (type, real);
6370 }
6371
6372 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6373
6374 static tree
6375 fold_builtin_nan (tree arg, tree type, int quiet)
6376 {
6377 REAL_VALUE_TYPE real;
6378 const char *str;
6379
6380 if (!validate_arg (arg, POINTER_TYPE))
6381 return NULL_TREE;
6382 str = c_getstr (arg);
6383 if (!str)
6384 return NULL_TREE;
6385
6386 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6387 return NULL_TREE;
6388
6389 return build_real (type, real);
6390 }
6391
6392 /* Return true if the floating point expression T has an integer value.
6393 We also allow +Inf, -Inf and NaN to be considered integer values. */
6394
6395 static bool
6396 integer_valued_real_p (tree t)
6397 {
6398 switch (TREE_CODE (t))
6399 {
6400 case FLOAT_EXPR:
6401 return true;
6402
6403 case ABS_EXPR:
6404 case SAVE_EXPR:
6405 return integer_valued_real_p (TREE_OPERAND (t, 0));
6406
6407 case COMPOUND_EXPR:
6408 case MODIFY_EXPR:
6409 case BIND_EXPR:
6410 return integer_valued_real_p (TREE_OPERAND (t, 1));
6411
6412 case PLUS_EXPR:
6413 case MINUS_EXPR:
6414 case MULT_EXPR:
6415 case MIN_EXPR:
6416 case MAX_EXPR:
6417 return integer_valued_real_p (TREE_OPERAND (t, 0))
6418 && integer_valued_real_p (TREE_OPERAND (t, 1));
6419
6420 case COND_EXPR:
6421 return integer_valued_real_p (TREE_OPERAND (t, 1))
6422 && integer_valued_real_p (TREE_OPERAND (t, 2));
6423
6424 case REAL_CST:
6425 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6426
6427 case NOP_EXPR:
6428 {
6429 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6430 if (TREE_CODE (type) == INTEGER_TYPE)
6431 return true;
6432 if (TREE_CODE (type) == REAL_TYPE)
6433 return integer_valued_real_p (TREE_OPERAND (t, 0));
6434 break;
6435 }
6436
6437 case CALL_EXPR:
6438 switch (builtin_mathfn_code (t))
6439 {
6440 CASE_FLT_FN (BUILT_IN_CEIL):
6441 CASE_FLT_FN (BUILT_IN_FLOOR):
6442 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6443 CASE_FLT_FN (BUILT_IN_RINT):
6444 CASE_FLT_FN (BUILT_IN_ROUND):
6445 CASE_FLT_FN (BUILT_IN_TRUNC):
6446 return true;
6447
6448 CASE_FLT_FN (BUILT_IN_FMIN):
6449 CASE_FLT_FN (BUILT_IN_FMAX):
6450 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6451 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6452
6453 default:
6454 break;
6455 }
6456 break;
6457
6458 default:
6459 break;
6460 }
6461 return false;
6462 }
6463
6464 /* FNDECL is assumed to be a builtin where truncation can be propagated
6465 across (for instance floor((double)f) == (double)floorf (f).
6466 Do the transformation for a call with argument ARG. */
6467
6468 static tree
6469 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6470 {
6471 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6472
6473 if (!validate_arg (arg, REAL_TYPE))
6474 return NULL_TREE;
6475
6476 /* Integer rounding functions are idempotent. */
6477 if (fcode == builtin_mathfn_code (arg))
6478 return arg;
6479
6480 /* If argument is already integer valued, and we don't need to worry
6481 about setting errno, there's no need to perform rounding. */
6482 if (! flag_errno_math && integer_valued_real_p (arg))
6483 return arg;
6484
6485 if (optimize)
6486 {
6487 tree arg0 = strip_float_extensions (arg);
6488 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6489 tree newtype = TREE_TYPE (arg0);
6490 tree decl;
6491
6492 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6493 && (decl = mathfn_built_in (newtype, fcode)))
6494 return fold_convert_loc (loc, ftype,
6495 build_call_expr_loc (loc, decl, 1,
6496 fold_convert_loc (loc,
6497 newtype,
6498 arg0)));
6499 }
6500 return NULL_TREE;
6501 }
6502
6503 /* FNDECL is assumed to be builtin which can narrow the FP type of
6504 the argument, for instance lround((double)f) -> lroundf (f).
6505 Do the transformation for a call with argument ARG. */
6506
6507 static tree
6508 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6509 {
6510 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6511
6512 if (!validate_arg (arg, REAL_TYPE))
6513 return NULL_TREE;
6514
6515 /* If argument is already integer valued, and we don't need to worry
6516 about setting errno, there's no need to perform rounding. */
6517 if (! flag_errno_math && integer_valued_real_p (arg))
6518 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6519 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6520
6521 if (optimize)
6522 {
6523 tree ftype = TREE_TYPE (arg);
6524 tree arg0 = strip_float_extensions (arg);
6525 tree newtype = TREE_TYPE (arg0);
6526 tree decl;
6527
6528 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6529 && (decl = mathfn_built_in (newtype, fcode)))
6530 return build_call_expr_loc (loc, decl, 1,
6531 fold_convert_loc (loc, newtype, arg0));
6532 }
6533
6534 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
6535 sizeof (int) == sizeof (long). */
6536 if (TYPE_PRECISION (integer_type_node)
6537 == TYPE_PRECISION (long_integer_type_node))
6538 {
6539 tree newfn = NULL_TREE;
6540 switch (fcode)
6541 {
6542 CASE_FLT_FN (BUILT_IN_ICEIL):
6543 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6544 break;
6545
6546 CASE_FLT_FN (BUILT_IN_IFLOOR):
6547 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6548 break;
6549
6550 CASE_FLT_FN (BUILT_IN_IROUND):
6551 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6552 break;
6553
6554 CASE_FLT_FN (BUILT_IN_IRINT):
6555 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6556 break;
6557
6558 default:
6559 break;
6560 }
6561
6562 if (newfn)
6563 {
6564 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6565 return fold_convert_loc (loc,
6566 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6567 }
6568 }
6569
6570 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6571 sizeof (long long) == sizeof (long). */
6572 if (TYPE_PRECISION (long_long_integer_type_node)
6573 == TYPE_PRECISION (long_integer_type_node))
6574 {
6575 tree newfn = NULL_TREE;
6576 switch (fcode)
6577 {
6578 CASE_FLT_FN (BUILT_IN_LLCEIL):
6579 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6580 break;
6581
6582 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6584 break;
6585
6586 CASE_FLT_FN (BUILT_IN_LLROUND):
6587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6588 break;
6589
6590 CASE_FLT_FN (BUILT_IN_LLRINT):
6591 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6592 break;
6593
6594 default:
6595 break;
6596 }
6597
6598 if (newfn)
6599 {
6600 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6601 return fold_convert_loc (loc,
6602 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6603 }
6604 }
6605
6606 return NULL_TREE;
6607 }
6608
6609 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6610 return type. Return NULL_TREE if no simplification can be made. */
6611
6612 static tree
6613 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6614 {
6615 tree res;
6616
6617 if (!validate_arg (arg, COMPLEX_TYPE)
6618 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6619 return NULL_TREE;
6620
6621 /* Calculate the result when the argument is a constant. */
6622 if (TREE_CODE (arg) == COMPLEX_CST
6623 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6624 type, mpfr_hypot)))
6625 return res;
6626
6627 if (TREE_CODE (arg) == COMPLEX_EXPR)
6628 {
6629 tree real = TREE_OPERAND (arg, 0);
6630 tree imag = TREE_OPERAND (arg, 1);
6631
6632 /* If either part is zero, cabs is fabs of the other. */
6633 if (real_zerop (real))
6634 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6635 if (real_zerop (imag))
6636 return fold_build1_loc (loc, ABS_EXPR, type, real);
6637
6638 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6639 if (flag_unsafe_math_optimizations
6640 && operand_equal_p (real, imag, OEP_PURE_SAME))
6641 {
6642 const REAL_VALUE_TYPE sqrt2_trunc
6643 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6644 STRIP_NOPS (real);
6645 return fold_build2_loc (loc, MULT_EXPR, type,
6646 fold_build1_loc (loc, ABS_EXPR, type, real),
6647 build_real (type, sqrt2_trunc));
6648 }
6649 }
6650
6651 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6652 if (TREE_CODE (arg) == NEGATE_EXPR
6653 || TREE_CODE (arg) == CONJ_EXPR)
6654 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6655
6656 /* Don't do this when optimizing for size. */
6657 if (flag_unsafe_math_optimizations
6658 && optimize && optimize_function_for_speed_p (cfun))
6659 {
6660 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6661
6662 if (sqrtfn != NULL_TREE)
6663 {
6664 tree rpart, ipart, result;
6665
6666 arg = builtin_save_expr (arg);
6667
6668 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6669 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6670
6671 rpart = builtin_save_expr (rpart);
6672 ipart = builtin_save_expr (ipart);
6673
6674 result = fold_build2_loc (loc, PLUS_EXPR, type,
6675 fold_build2_loc (loc, MULT_EXPR, type,
6676 rpart, rpart),
6677 fold_build2_loc (loc, MULT_EXPR, type,
6678 ipart, ipart));
6679
6680 return build_call_expr_loc (loc, sqrtfn, 1, result);
6681 }
6682 }
6683
6684 return NULL_TREE;
6685 }
6686
6687 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6688 complex tree type of the result. If NEG is true, the imaginary
6689 zero is negative. */
6690
6691 static tree
6692 build_complex_cproj (tree type, bool neg)
6693 {
6694 REAL_VALUE_TYPE rinf, rzero = dconst0;
6695
6696 real_inf (&rinf);
6697 rzero.sign = neg;
6698 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6699 build_real (TREE_TYPE (type), rzero));
6700 }
6701
6702 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6703 return type. Return NULL_TREE if no simplification can be made. */
6704
static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  /* Only fold calls whose argument really is a complex value with a
     floating-point component type.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Any infinite input projects to (inf + copysign (0, imag) * I);
	 finite constants project to themselves.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* Look through no-op conversions so REAL_CST operands are visible.  */
      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  /* Non-constant arguments whose parts are unknown: leave the call alone.  */
  return NULL_TREE;
}
6757
6758 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6759 Return NULL_TREE if no simplification can be made. */
6760
static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The
     &dconst0/true arguments presumably restrict constant folding to
     sqrt's domain (arg >= 0) -- see do_mpfr_arg1.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: decrementing the binary exponent
	     halves the value, turning 1/N into 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is provably nonnegative, since pow may be
	 defined for negative x while sqrt needs a nonnegative base.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
6831
6832 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6833 Return NULL_TREE if no simplification can be made. */
6834
static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All of the following transformations can change rounding and
     exception behavior, so they are gated on unsafe-math.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to 1/6 by decrementing the binary exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* Compute (1/3) * (1/3) = 1/9 exactly in REAL_VALUE
		     arithmetic before truncating to TYPE's precision.  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
6922
6923 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6924 TYPE is the type of the return value. Return NULL_TREE if no
6925 simplification can be made. */
6926
6927 static tree
6928 fold_builtin_cos (location_t loc,
6929 tree arg, tree type, tree fndecl)
6930 {
6931 tree res, narg;
6932
6933 if (!validate_arg (arg, REAL_TYPE))
6934 return NULL_TREE;
6935
6936 /* Calculate the result when the argument is a constant. */
6937 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6938 return res;
6939
6940 /* Optimize cos(-x) into cos (x). */
6941 if ((narg = fold_strip_sign_ops (arg)))
6942 return build_call_expr_loc (loc, fndecl, 1, narg);
6943
6944 return NULL_TREE;
6945 }
6946
6947 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6948 Return NULL_TREE if no simplification can be made. */
6949
6950 static tree
6951 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6952 {
6953 if (validate_arg (arg, REAL_TYPE))
6954 {
6955 tree res, narg;
6956
6957 /* Calculate the result when the argument is a constant. */
6958 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6959 return res;
6960
6961 /* Optimize cosh(-x) into cosh (x). */
6962 if ((narg = fold_strip_sign_ops (arg)))
6963 return build_call_expr_loc (loc, fndecl, 1, narg);
6964 }
6965
6966 return NULL_TREE;
6967 }
6968
6969 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6970 argument ARG. TYPE is the type of the return value. Return
6971 NULL_TREE if no simplification can be made. */
6972
6973 static tree
6974 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6975 bool hyper)
6976 {
6977 if (validate_arg (arg, COMPLEX_TYPE)
6978 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6979 {
6980 tree tmp;
6981
6982 /* Calculate the result when the argument is a constant. */
6983 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6984 return tmp;
6985
6986 /* Optimize fn(-x) into fn(x). */
6987 if ((tmp = fold_strip_sign_ops (arg)))
6988 return build_call_expr_loc (loc, fndecl, 1, tmp);
6989 }
6990
6991 return NULL_TREE;
6992 }
6993
6994 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6995 Return NULL_TREE if no simplification can be made. */
6996
6997 static tree
6998 fold_builtin_tan (tree arg, tree type)
6999 {
7000 enum built_in_function fcode;
7001 tree res;
7002
7003 if (!validate_arg (arg, REAL_TYPE))
7004 return NULL_TREE;
7005
7006 /* Calculate the result when the argument is a constant. */
7007 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7008 return res;
7009
7010 /* Optimize tan(atan(x)) = x. */
7011 fcode = builtin_mathfn_code (arg);
7012 if (flag_unsafe_math_optimizations
7013 && (fcode == BUILT_IN_ATAN
7014 || fcode == BUILT_IN_ATANF
7015 || fcode == BUILT_IN_ATANL))
7016 return CALL_EXPR_ARG (arg, 0);
7017
7018 return NULL_TREE;
7019 }
7020
7021 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7022 NULL_TREE if no simplification can be made. */
7023
7024 static tree
7025 fold_builtin_sincos (location_t loc,
7026 tree arg0, tree arg1, tree arg2)
7027 {
7028 tree type;
7029 tree res, fn, call;
7030
7031 if (!validate_arg (arg0, REAL_TYPE)
7032 || !validate_arg (arg1, POINTER_TYPE)
7033 || !validate_arg (arg2, POINTER_TYPE))
7034 return NULL_TREE;
7035
7036 type = TREE_TYPE (arg0);
7037
7038 /* Calculate the result when the argument is a constant. */
7039 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7040 return res;
7041
7042 /* Canonicalize sincos to cexpi. */
7043 if (!TARGET_C99_FUNCTIONS)
7044 return NULL_TREE;
7045 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7046 if (!fn)
7047 return NULL_TREE;
7048
7049 call = build_call_expr_loc (loc, fn, 1, arg0);
7050 call = builtin_save_expr (call);
7051
7052 return build2 (COMPOUND_EXPR, void_type_node,
7053 build2 (MODIFY_EXPR, void_type_node,
7054 build_fold_indirect_ref_loc (loc, arg1),
7055 build1 (IMAGPART_EXPR, type, call)),
7056 build2 (MODIFY_EXPR, void_type_node,
7057 build_fold_indirect_ref_loc (loc, arg2),
7058 build1 (REALPART_EXPR, type, call)));
7059 }
7060
7061 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7062 NULL_TREE if no simplification can be made. */
7063
static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  /* Require a complex argument with floating-point components.  */
  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar (real) element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + i*y) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated only once even though the
	 results are used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp(r)*real(cexpi(i)) + I*exp(r)*imag(cexpi(i)).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7128
7129 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7130 Return NULL_TREE if no simplification can be made. */
7131
7132 static tree
7133 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7134 {
7135 if (!validate_arg (arg, REAL_TYPE))
7136 return NULL_TREE;
7137
7138 /* Optimize trunc of constant value. */
7139 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7140 {
7141 REAL_VALUE_TYPE r, x;
7142 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7143
7144 x = TREE_REAL_CST (arg);
7145 real_trunc (&r, TYPE_MODE (type), &x);
7146 return build_real (type, r);
7147 }
7148
7149 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7150 }
7151
7152 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7153 Return NULL_TREE if no simplification can be made. */
7154
7155 static tree
7156 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7157 {
7158 if (!validate_arg (arg, REAL_TYPE))
7159 return NULL_TREE;
7160
7161 /* Optimize floor of constant value. */
7162 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7163 {
7164 REAL_VALUE_TYPE x;
7165
7166 x = TREE_REAL_CST (arg);
7167 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7168 {
7169 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7170 REAL_VALUE_TYPE r;
7171
7172 real_floor (&r, TYPE_MODE (type), &x);
7173 return build_real (type, r);
7174 }
7175 }
7176
7177 /* Fold floor (x) where x is nonnegative to trunc (x). */
7178 if (tree_expr_nonnegative_p (arg))
7179 {
7180 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7181 if (truncfn)
7182 return build_call_expr_loc (loc, truncfn, 1, arg);
7183 }
7184
7185 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7186 }
7187
7188 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7189 Return NULL_TREE if no simplification can be made. */
7190
7191 static tree
7192 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7193 {
7194 if (!validate_arg (arg, REAL_TYPE))
7195 return NULL_TREE;
7196
7197 /* Optimize ceil of constant value. */
7198 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7199 {
7200 REAL_VALUE_TYPE x;
7201
7202 x = TREE_REAL_CST (arg);
7203 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7204 {
7205 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7206 REAL_VALUE_TYPE r;
7207
7208 real_ceil (&r, TYPE_MODE (type), &x);
7209 return build_real (type, r);
7210 }
7211 }
7212
7213 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7214 }
7215
7216 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7217 Return NULL_TREE if no simplification can be made. */
7218
7219 static tree
7220 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7221 {
7222 if (!validate_arg (arg, REAL_TYPE))
7223 return NULL_TREE;
7224
7225 /* Optimize round of constant value. */
7226 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7227 {
7228 REAL_VALUE_TYPE x;
7229
7230 x = TREE_REAL_CST (arg);
7231 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7232 {
7233 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7234 REAL_VALUE_TYPE r;
7235
7236 real_round (&r, TYPE_MODE (type), &x);
7237 return build_real (type, r);
7238 }
7239 }
7240
7241 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7242 }
7243
7244 /* Fold function call to builtin lround, lroundf or lroundl (or the
7245 corresponding long long versions) and other rounding functions. ARG
7246 is the argument to the call. Return NULL_TREE if no simplification
7247 can be made. */
7248
static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only fold finite constants; Inf/NaN conversion to an integer
	 type is left to the runtime.  */
      if (real_isfinite (&x))
	{
	  /* ITYPE is the integer result type, FTYPE the floating
	     argument type.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Apply the rounding mode implied by the builtin.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a two-word integer and fold only if the value
	     is representable in the result type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7311
7312 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7313 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7314 the argument to the call. Return NULL_TREE if no simplification can
7315 be made. */
7316
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as a two-word (hi:lo) value; WIDTH is the
	 precision of its type in bits.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((unsigned HOST_WIDE_INT) (-1)
		    << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: index (1-based) of the least significant set bit,
	     0 when no bit is set.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; at zero, fold only if the target
	     defines a value for it.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; same caveat at zero as clz.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: number of redundant sign bits.  Negative values are
	     complemented first so the leading-bit search below applies
	     uniformly.  */
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: clear the lowest set bit repeatedly, counting
	     iterations, in both words.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7424
7425 /* Fold function call to builtin_bswap and the long and long long
7426 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* Source and result constants are kept as two-word (hi:lo)
	 values; WIDTH is the precision of the argument type.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Copy each byte from source position S to the mirrored
	       destination position D, picking the word each index
	       falls into.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Build the folded constant from one word or both, depending on
	 whether the precision fits a single HOST_WIDE_INT.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7483
7484 /* A subroutine of fold_builtin to fold the various logarithmic
7485 functions. Return NULL_TREE if no simplification can me made.
7486 FUNC is the corresponding MPFR logarithm function. */
7487
static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies
	 which logarithm this call is, so only the matching exponential
	 cancels.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
7576
7577 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7578 NULL_TREE if no simplification can be made. */
7579
7580 static tree
7581 fold_builtin_hypot (location_t loc, tree fndecl,
7582 tree arg0, tree arg1, tree type)
7583 {
7584 tree res, narg0, narg1;
7585
7586 if (!validate_arg (arg0, REAL_TYPE)
7587 || !validate_arg (arg1, REAL_TYPE))
7588 return NULL_TREE;
7589
7590 /* Calculate the result when the argument is a constant. */
7591 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7592 return res;
7593
7594 /* If either argument to hypot has a negate or abs, strip that off.
7595 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7596 narg0 = fold_strip_sign_ops (arg0);
7597 narg1 = fold_strip_sign_ops (arg1);
7598 if (narg0 || narg1)
7599 {
7600 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7601 narg1 ? narg1 : arg1);
7602 }
7603
7604 /* If either argument is zero, hypot is fabs of the other. */
7605 if (real_zerop (arg0))
7606 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7607 else if (real_zerop (arg1))
7608 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7609
7610 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7611 if (flag_unsafe_math_optimizations
7612 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7613 {
7614 const REAL_VALUE_TYPE sqrt2_trunc
7615 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7616 return fold_build2_loc (loc, MULT_EXPR, type,
7617 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7618 build_real (type, sqrt2_trunc));
7619 }
7620
7621 return NULL_TREE;
7622 }
7623
7624
7625 /* Fold a builtin function call to pow, powf, or powl. Return
7626 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  Any side effects in y are preserved
     by omit_one_operand_loc.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* Compare against 1/3 truncated to TYPE's precision, since
	     that is how the constant would have been written.  */
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  Round-trip C through an
	 integer and back; identical means C was integral.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
7776
7777 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7778 Return NULL_TREE if no simplification can be made. */
7779 static tree
7780 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7781 tree arg0, tree arg1, tree type)
7782 {
7783 if (!validate_arg (arg0, REAL_TYPE)
7784 || !validate_arg (arg1, INTEGER_TYPE))
7785 return NULL_TREE;
7786
7787 /* Optimize pow(1.0,y) = 1.0. */
7788 if (real_onep (arg0))
7789 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7790
7791 if (host_integerp (arg1, 0))
7792 {
7793 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7794
7795 /* Evaluate powi at compile-time. */
7796 if (TREE_CODE (arg0) == REAL_CST
7797 && !TREE_OVERFLOW (arg0))
7798 {
7799 REAL_VALUE_TYPE x;
7800 x = TREE_REAL_CST (arg0);
7801 real_powi (&x, TYPE_MODE (type), &x, c);
7802 return build_real (type, x);
7803 }
7804
7805 /* Optimize pow(x,0) = 1.0. */
7806 if (c == 0)
7807 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7808 arg0);
7809
7810 /* Optimize pow(x,1) = x. */
7811 if (c == 1)
7812 return arg0;
7813
7814 /* Optimize pow(x,-1) = 1.0/x. */
7815 if (c == -1)
7816 return fold_build2_loc (loc, RDIV_EXPR, type,
7817 build_real (type, dconst1), arg0);
7818 }
7819
7820 return NULL_TREE;
7821 }
7822
7823 /* A subroutine of fold_builtin to fold the various exponent
7824 functions. Return NULL_TREE if no simplification can be made.
7825 FUNC is the corresponding MPFR exponent function. */
7826
7827 static tree
7828 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7829 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7830 {
7831 if (validate_arg (arg, REAL_TYPE))
7832 {
7833 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7834 tree res;
7835
7836 /* Calculate the result when the argument is a constant. */
7837 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7838 return res;
7839
7840 /* Optimize expN(logN(x)) = x. */
7841 if (flag_unsafe_math_optimizations)
7842 {
7843 const enum built_in_function fcode = builtin_mathfn_code (arg);
7844
7845 if ((func == mpfr_exp
7846 && (fcode == BUILT_IN_LOG
7847 || fcode == BUILT_IN_LOGF
7848 || fcode == BUILT_IN_LOGL))
7849 || (func == mpfr_exp2
7850 && (fcode == BUILT_IN_LOG2
7851 || fcode == BUILT_IN_LOG2F
7852 || fcode == BUILT_IN_LOG2L))
7853 || (func == mpfr_exp10
7854 && (fcode == BUILT_IN_LOG10
7855 || fcode == BUILT_IN_LOG10F
7856 || fcode == BUILT_IN_LOG10L)))
7857 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7858 }
7859 }
7860
7861 return NULL_TREE;
7862 }
7863
7864 /* Return true if VAR is a VAR_DECL or a component thereof. */
7865
7866 static bool
7867 var_decl_component_p (tree var)
7868 {
7869 tree inner = var;
7870 while (handled_component_p (inner))
7871 inner = TREE_OPERAND (inner, 0);
7872 return SSA_VAR_P (inner);
7873 }
7874
/* Fold function call to builtin memset.  DEST, C and LEN are the call's
   arguments, TYPE its return type, and IGNORE is true when the call's
   value is unused.  The fold turns a memset of a whole scalar object
   into a plain store of the replicated byte.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* The store form below needs a constant, non-negative length.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array destination, consider a store of its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* Only fold when the memset covers exactly one object of type ETYPE
     and the destination is sufficiently aligned for it.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  /* The replicated value must fit in a HOST_WIDE_INT.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* The byte-replication below assumes 8-bit units and at most
	 64-bit HOST_WIDE_INT.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The last
	 step is split into two shifts so the shift count never equals
	 the width of the type (which would be undefined when
	 HOST_WIDE_INT is 32 bits).  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *)dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* memset returns DEST; sequence the store before it.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
7953
/* Fold function call to builtin bzero with arguments DEST and SIZE.
   IGNORE is true when the call's (void) result is unused.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* Only fold when the call's value is not used.  */
  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
			      fold_convert_loc (loc, size_type_node, size),
			      void_type_node, ignore);
}
7976
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* For memmove, try to prove the regions cannot overlap so the
	 call can be lowered to memcpy; otherwise give up.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src);
	  dest_align = get_pointer_alignment (dest);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      /* get_ref_base_and_extent works in bits; the range test
		 below works in bytes.  */
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Same variable: overlap unless the ranges are disjoint.
		     Different variables cannot overlap.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  double_int off;
		  /* Only comparable when both MEM_REFs are based on the
		     same pointer; fold the MEM_REF offsets into the
		     byte offsets, guarding against overflow.  */
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      /* From here on: try to turn the copy into a single load/store of
	 the pointed-to type.  That needs a constant length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return NULL_TREE;
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For a pointer to an array whose size does not equal LEN, use
	 the element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return NULL_TREE;

      /* The load/store must be sufficiently aligned for the types.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* Try to express the destination as a whole-object access.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      /* Likewise for the source; prefer the destination type when both
	 sides fold, so the assignment is type-consistent.  */
      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* One side failed to fold: access it through the other side's
	 type, inserting an under-aligned variant if necessary.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy returns a pointer to the terminating NUL, i.e. DEST+LEN-1.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8259
8260 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8261 If LEN is not NULL, it represents the length of the string to be
8262 copied. Return NULL_TREE if no simplification can be made. */
8263
8264 tree
8265 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8266 {
8267 tree fn;
8268
8269 if (!validate_arg (dest, POINTER_TYPE)
8270 || !validate_arg (src, POINTER_TYPE))
8271 return NULL_TREE;
8272
8273 /* If SRC and DEST are the same (and not volatile), return DEST. */
8274 if (operand_equal_p (src, dest, 0))
8275 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8276
8277 if (optimize_function_for_size_p (cfun))
8278 return NULL_TREE;
8279
8280 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8281 if (!fn)
8282 return NULL_TREE;
8283
8284 if (!len)
8285 {
8286 len = c_strlen (src, 1);
8287 if (! len || TREE_SIDE_EFFECTS (len))
8288 return NULL_TREE;
8289 }
8290
8291 len = fold_convert_loc (loc, size_type_node, len);
8292 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8293 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8294 build_call_expr_loc (loc, fn, 3, dest, src, len));
8295 }
8296
8297 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8298 Return NULL_TREE if no simplification can be made. */
8299
8300 static tree
8301 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8302 {
8303 tree fn, len, lenp1, call, type;
8304
8305 if (!validate_arg (dest, POINTER_TYPE)
8306 || !validate_arg (src, POINTER_TYPE))
8307 return NULL_TREE;
8308
8309 len = c_strlen (src, 1);
8310 if (!len
8311 || TREE_CODE (len) != INTEGER_CST)
8312 return NULL_TREE;
8313
8314 if (optimize_function_for_size_p (cfun)
8315 /* If length is zero it's small enough. */
8316 && !integer_zerop (len))
8317 return NULL_TREE;
8318
8319 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8320 if (!fn)
8321 return NULL_TREE;
8322
8323 lenp1 = size_binop_loc (loc, PLUS_EXPR,
8324 fold_convert_loc (loc, size_type_node, len),
8325 build_int_cst (size_type_node, 1));
8326 /* We use dest twice in building our expression. Save it from
8327 multiple expansions. */
8328 dest = builtin_save_expr (dest);
8329 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8330
8331 type = TREE_TYPE (TREE_TYPE (fndecl));
8332 dest = fold_build_pointer_plus_loc (loc, dest, len);
8333 dest = fold_convert_loc (loc, type, dest);
8334 dest = omit_one_operand_loc (loc, type, dest, call);
8335 return dest;
8336 }
8337
8338 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8339 If SLEN is not NULL, it represents the length of the source string.
8340 Return NULL_TREE if no simplification can be made. */
8341
8342 tree
8343 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8344 tree src, tree len, tree slen)
8345 {
8346 tree fn;
8347
8348 if (!validate_arg (dest, POINTER_TYPE)
8349 || !validate_arg (src, POINTER_TYPE)
8350 || !validate_arg (len, INTEGER_TYPE))
8351 return NULL_TREE;
8352
8353 /* If the LEN parameter is zero, return DEST. */
8354 if (integer_zerop (len))
8355 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8356
8357 /* We can't compare slen with len as constants below if len is not a
8358 constant. */
8359 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8360 return NULL_TREE;
8361
8362 if (!slen)
8363 slen = c_strlen (src, 1);
8364
8365 /* Now, we must be passed a constant src ptr parameter. */
8366 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8367 return NULL_TREE;
8368
8369 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8370
8371 /* We do not support simplification of this case, though we do
8372 support it when expanding trees into RTL. */
8373 /* FIXME: generate a call to __builtin_memset. */
8374 if (tree_int_cst_lt (slen, len))
8375 return NULL_TREE;
8376
8377 /* OK transform into builtin memcpy. */
8378 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8379 if (!fn)
8380 return NULL_TREE;
8381
8382 len = fold_convert_loc (loc, size_type_node, len);
8383 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8384 build_call_expr_loc (loc, fn, 3, dest, src, len));
8385 }
8386
8387 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8388 arguments to the call, and TYPE is its return type.
8389 Return NULL_TREE if no simplification can be made. */
8390
8391 static tree
8392 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8393 {
8394 if (!validate_arg (arg1, POINTER_TYPE)
8395 || !validate_arg (arg2, INTEGER_TYPE)
8396 || !validate_arg (len, INTEGER_TYPE))
8397 return NULL_TREE;
8398 else
8399 {
8400 const char *p1;
8401
8402 if (TREE_CODE (arg2) != INTEGER_CST
8403 || !host_integerp (len, 1))
8404 return NULL_TREE;
8405
8406 p1 = c_getstr (arg1);
8407 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8408 {
8409 char c;
8410 const char *r;
8411 tree tem;
8412
8413 if (target_char_cast (arg2, &c))
8414 return NULL_TREE;
8415
8416 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8417
8418 if (r == NULL)
8419 return build_int_cst (TREE_TYPE (arg1), 0);
8420
8421 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8422 return fold_convert_loc (loc, type, tem);
8423 }
8424 return NULL_TREE;
8425 }
8426 }
8427
8428 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8429 Return NULL_TREE if no simplification can be made. */
8430
8431 static tree
8432 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8433 {
8434 const char *p1, *p2;
8435
8436 if (!validate_arg (arg1, POINTER_TYPE)
8437 || !validate_arg (arg2, POINTER_TYPE)
8438 || !validate_arg (len, INTEGER_TYPE))
8439 return NULL_TREE;
8440
8441 /* If the LEN parameter is zero, return zero. */
8442 if (integer_zerop (len))
8443 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8444 arg1, arg2);
8445
8446 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8447 if (operand_equal_p (arg1, arg2, 0))
8448 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8449
8450 p1 = c_getstr (arg1);
8451 p2 = c_getstr (arg2);
8452
8453 /* If all arguments are constant, and the value of len is not greater
8454 than the lengths of arg1 and arg2, evaluate at compile-time. */
8455 if (host_integerp (len, 1) && p1 && p2
8456 && compare_tree_int (len, strlen (p1) + 1) <= 0
8457 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8458 {
8459 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8460
8461 if (r > 0)
8462 return integer_one_node;
8463 else if (r < 0)
8464 return integer_minus_one_node;
8465 else
8466 return integer_zero_node;
8467 }
8468
8469 /* If len parameter is one, return an expression corresponding to
8470 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8471 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8472 {
8473 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8474 tree cst_uchar_ptr_node
8475 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8476
8477 tree ind1
8478 = fold_convert_loc (loc, integer_type_node,
8479 build1 (INDIRECT_REF, cst_uchar_node,
8480 fold_convert_loc (loc,
8481 cst_uchar_ptr_node,
8482 arg1)));
8483 tree ind2
8484 = fold_convert_loc (loc, integer_type_node,
8485 build1 (INDIRECT_REF, cst_uchar_node,
8486 fold_convert_loc (loc,
8487 cst_uchar_ptr_node,
8488 arg2)));
8489 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8490 }
8491
8492 return NULL_TREE;
8493 }
8494
8495 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8496 Return NULL_TREE if no simplification can be made. */
8497
8498 static tree
8499 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8500 {
8501 const char *p1, *p2;
8502
8503 if (!validate_arg (arg1, POINTER_TYPE)
8504 || !validate_arg (arg2, POINTER_TYPE))
8505 return NULL_TREE;
8506
8507 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8508 if (operand_equal_p (arg1, arg2, 0))
8509 return integer_zero_node;
8510
8511 p1 = c_getstr (arg1);
8512 p2 = c_getstr (arg2);
8513
8514 if (p1 && p2)
8515 {
8516 const int i = strcmp (p1, p2);
8517 if (i < 0)
8518 return integer_minus_one_node;
8519 else if (i > 0)
8520 return integer_one_node;
8521 else
8522 return integer_zero_node;
8523 }
8524
8525 /* If the second arg is "", return *(const unsigned char*)arg1. */
8526 if (p2 && *p2 == '\0')
8527 {
8528 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8529 tree cst_uchar_ptr_node
8530 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8531
8532 return fold_convert_loc (loc, integer_type_node,
8533 build1 (INDIRECT_REF, cst_uchar_node,
8534 fold_convert_loc (loc,
8535 cst_uchar_ptr_node,
8536 arg1)));
8537 }
8538
8539 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8540 if (p1 && *p1 == '\0')
8541 {
8542 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8543 tree cst_uchar_ptr_node
8544 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8545
8546 tree temp
8547 = fold_convert_loc (loc, integer_type_node,
8548 build1 (INDIRECT_REF, cst_uchar_node,
8549 fold_convert_loc (loc,
8550 cst_uchar_ptr_node,
8551 arg2)));
8552 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8553 }
8554
8555 return NULL_TREE;
8556 }
8557
8558 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8559 Return NULL_TREE if no simplification can be made. */
8560
8561 static tree
8562 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8563 {
8564 const char *p1, *p2;
8565
8566 if (!validate_arg (arg1, POINTER_TYPE)
8567 || !validate_arg (arg2, POINTER_TYPE)
8568 || !validate_arg (len, INTEGER_TYPE))
8569 return NULL_TREE;
8570
8571 /* If the LEN parameter is zero, return zero. */
8572 if (integer_zerop (len))
8573 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8574 arg1, arg2);
8575
8576 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8577 if (operand_equal_p (arg1, arg2, 0))
8578 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8579
8580 p1 = c_getstr (arg1);
8581 p2 = c_getstr (arg2);
8582
8583 if (host_integerp (len, 1) && p1 && p2)
8584 {
8585 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8586 if (i > 0)
8587 return integer_one_node;
8588 else if (i < 0)
8589 return integer_minus_one_node;
8590 else
8591 return integer_zero_node;
8592 }
8593
8594 /* If the second arg is "", and the length is greater than zero,
8595 return *(const unsigned char*)arg1. */
8596 if (p2 && *p2 == '\0'
8597 && TREE_CODE (len) == INTEGER_CST
8598 && tree_int_cst_sgn (len) == 1)
8599 {
8600 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8601 tree cst_uchar_ptr_node
8602 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8603
8604 return fold_convert_loc (loc, integer_type_node,
8605 build1 (INDIRECT_REF, cst_uchar_node,
8606 fold_convert_loc (loc,
8607 cst_uchar_ptr_node,
8608 arg1)));
8609 }
8610
8611 /* If the first arg is "", and the length is greater than zero,
8612 return -*(const unsigned char*)arg2. */
8613 if (p1 && *p1 == '\0'
8614 && TREE_CODE (len) == INTEGER_CST
8615 && tree_int_cst_sgn (len) == 1)
8616 {
8617 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8618 tree cst_uchar_ptr_node
8619 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8620
8621 tree temp = fold_convert_loc (loc, integer_type_node,
8622 build1 (INDIRECT_REF, cst_uchar_node,
8623 fold_convert_loc (loc,
8624 cst_uchar_ptr_node,
8625 arg2)));
8626 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8627 }
8628
8629 /* If len parameter is one, return an expression corresponding to
8630 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8631 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8632 {
8633 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8634 tree cst_uchar_ptr_node
8635 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8636
8637 tree ind1 = fold_convert_loc (loc, integer_type_node,
8638 build1 (INDIRECT_REF, cst_uchar_node,
8639 fold_convert_loc (loc,
8640 cst_uchar_ptr_node,
8641 arg1)));
8642 tree ind2 = fold_convert_loc (loc, integer_type_node,
8643 build1 (INDIRECT_REF, cst_uchar_node,
8644 fold_convert_loc (loc,
8645 cst_uchar_ptr_node,
8646 arg2)));
8647 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8648 }
8649
8650 return NULL_TREE;
8651 }
8652
8653 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8654 ARG. Return NULL_TREE if no simplification can be made. */
8655
8656 static tree
8657 fold_builtin_signbit (location_t loc, tree arg, tree type)
8658 {
8659 if (!validate_arg (arg, REAL_TYPE))
8660 return NULL_TREE;
8661
8662 /* If ARG is a compile-time constant, determine the result. */
8663 if (TREE_CODE (arg) == REAL_CST
8664 && !TREE_OVERFLOW (arg))
8665 {
8666 REAL_VALUE_TYPE c;
8667
8668 c = TREE_REAL_CST (arg);
8669 return (REAL_VALUE_NEGATIVE (c)
8670 ? build_one_cst (type)
8671 : build_zero_cst (type));
8672 }
8673
8674 /* If ARG is non-negative, the result is always zero. */
8675 if (tree_expr_nonnegative_p (arg))
8676 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8677
8678 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8679 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8680 return fold_convert (type,
8681 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8682 build_real (TREE_TYPE (arg), dconst0)));
8683
8684 return NULL_TREE;
8685 }
8686
8687 /* Fold function call to builtin copysign, copysignf or copysignl with
8688 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8689 be made. */
8690
8691 static tree
8692 fold_builtin_copysign (location_t loc, tree fndecl,
8693 tree arg1, tree arg2, tree type)
8694 {
8695 tree tem;
8696
8697 if (!validate_arg (arg1, REAL_TYPE)
8698 || !validate_arg (arg2, REAL_TYPE))
8699 return NULL_TREE;
8700
8701 /* copysign(X,X) is X. */
8702 if (operand_equal_p (arg1, arg2, 0))
8703 return fold_convert_loc (loc, type, arg1);
8704
8705 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8706 if (TREE_CODE (arg1) == REAL_CST
8707 && TREE_CODE (arg2) == REAL_CST
8708 && !TREE_OVERFLOW (arg1)
8709 && !TREE_OVERFLOW (arg2))
8710 {
8711 REAL_VALUE_TYPE c1, c2;
8712
8713 c1 = TREE_REAL_CST (arg1);
8714 c2 = TREE_REAL_CST (arg2);
8715 /* c1.sign := c2.sign. */
8716 real_copysign (&c1, &c2);
8717 return build_real (type, c1);
8718 }
8719
8720 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8721 Remember to evaluate Y for side-effects. */
8722 if (tree_expr_nonnegative_p (arg2))
8723 return omit_one_operand_loc (loc, type,
8724 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8725 arg2);
8726
8727 /* Strip sign changing operations for the first argument. */
8728 tem = fold_strip_sign_ops (arg1);
8729 if (tem)
8730 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8731
8732 return NULL_TREE;
8733 }
8734
8735 /* Fold a call to builtin isascii with argument ARG. */
8736
8737 static tree
8738 fold_builtin_isascii (location_t loc, tree arg)
8739 {
8740 if (!validate_arg (arg, INTEGER_TYPE))
8741 return NULL_TREE;
8742 else
8743 {
8744 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8745 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8746 build_int_cst (integer_type_node,
8747 ~ (unsigned HOST_WIDE_INT) 0x7f));
8748 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8749 arg, integer_zero_node);
8750 }
8751 }
8752
8753 /* Fold a call to builtin toascii with argument ARG. */
8754
8755 static tree
8756 fold_builtin_toascii (location_t loc, tree arg)
8757 {
8758 if (!validate_arg (arg, INTEGER_TYPE))
8759 return NULL_TREE;
8760
8761 /* Transform toascii(c) -> (c & 0x7f). */
8762 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8763 build_int_cst (integer_type_node, 0x7f));
8764 }
8765
8766 /* Fold a call to builtin isdigit with argument ARG. */
8767
8768 static tree
8769 fold_builtin_isdigit (location_t loc, tree arg)
8770 {
8771 if (!validate_arg (arg, INTEGER_TYPE))
8772 return NULL_TREE;
8773 else
8774 {
8775 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8776 /* According to the C standard, isdigit is unaffected by locale.
8777 However, it definitely is affected by the target character set. */
8778 unsigned HOST_WIDE_INT target_digit0
8779 = lang_hooks.to_target_charset ('0');
8780
8781 if (target_digit0 == 0)
8782 return NULL_TREE;
8783
8784 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8785 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8786 build_int_cst (unsigned_type_node, target_digit0));
8787 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8788 build_int_cst (unsigned_type_node, 9));
8789 }
8790 }
8791
8792 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8793
8794 static tree
8795 fold_builtin_fabs (location_t loc, tree arg, tree type)
8796 {
8797 if (!validate_arg (arg, REAL_TYPE))
8798 return NULL_TREE;
8799
8800 arg = fold_convert_loc (loc, type, arg);
8801 if (TREE_CODE (arg) == REAL_CST)
8802 return fold_abs_const (arg, type);
8803 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8804 }
8805
8806 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8807
8808 static tree
8809 fold_builtin_abs (location_t loc, tree arg, tree type)
8810 {
8811 if (!validate_arg (arg, INTEGER_TYPE))
8812 return NULL_TREE;
8813
8814 arg = fold_convert_loc (loc, type, arg);
8815 if (TREE_CODE (arg) == INTEGER_CST)
8816 return fold_abs_const (arg, type);
8817 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8818 }
8819
8820 /* Fold a fma operation with arguments ARG[012]. */
8821
8822 tree
8823 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8824 tree type, tree arg0, tree arg1, tree arg2)
8825 {
8826 if (TREE_CODE (arg0) == REAL_CST
8827 && TREE_CODE (arg1) == REAL_CST
8828 && TREE_CODE (arg2) == REAL_CST)
8829 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8830
8831 return NULL_TREE;
8832 }
8833
8834 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8835
8836 static tree
8837 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8838 {
8839 if (validate_arg (arg0, REAL_TYPE)
8840 && validate_arg(arg1, REAL_TYPE)
8841 && validate_arg(arg2, REAL_TYPE))
8842 {
8843 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8844 if (tem)
8845 return tem;
8846
8847 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8848 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8849 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8850 }
8851 return NULL_TREE;
8852 }
8853
8854 /* Fold a call to builtin fmin or fmax. */
8855
static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  /* MAX selects between fmax (true) and fmin (false) semantics; the
     same code handles both, differing only in the mpfr function and
     the tree code chosen below.  */
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME allows the two
	 operands to match even when they contain identical pure calls.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
8898
8899 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8900
8901 static tree
8902 fold_builtin_carg (location_t loc, tree arg, tree type)
8903 {
8904 if (validate_arg (arg, COMPLEX_TYPE)
8905 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8906 {
8907 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8908
8909 if (atan2_fn)
8910 {
8911 tree new_arg = builtin_save_expr (arg);
8912 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8913 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8914 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8915 }
8916 }
8917
8918 return NULL_TREE;
8919 }
8920
8921 /* Fold a call to builtin logb/ilogb. */
8922
static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  /* RETTYPE distinguishes the two builtins: logb returns a real type,
     ilogb an integer type; the rvc_nan/rvc_inf case keys off that.  */
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold constant arguments that did not overflow on conversion.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through...  (ilogb of Inf/NaN is handled like zero:
	     we cannot know the target's FP_ILOGBNAN/INT_MAX values.)  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
8962
8963 /* Fold a call to builtin significand, if radix == 2. */
8964
static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold constant arguments that did not overflow on conversion.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9001
9002 /* Fold a call to builtin frexp, we can assume the base is 2. */
9003
static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  /* ARG0 is the value, ARG1 the int* that receives the exponent.  */
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant first argument (without overflow) can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  The store to
	 *arg1 must be marked as having side effects so it is not
	 discarded during later folding.  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9057
9058 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9059 then we can assume the base is two. If it's false, then we have to
9060 check the mode of the TYPE parameter in certain cases. */
9061
static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  /* LDEXP true means the base is known to be 2; otherwise we only fold
     constants when TYPE's format has radix 2 (checked below).  */
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9118
9119 /* Fold a call to builtin modf. */
9120
static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  /* ARG0 is the value, ARG1 the pointer that receives the integral
     part; RETTYPE is the real type of the result.  */
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant first argument (without overflow) can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  The fractional part
	     keeps the sign of the infinity.  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  The store
	 must be marked as a side effect so folding keeps it.  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			  build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			  build_real (rettype, frac));
    }

  return NULL_TREE;
}
9174
9175 /* Given a location LOC, an interclass builtin function decl FNDECL
9176 and its single argument ARG, return an folded expression computing
9177 the same, or NULL_TREE if we either couldn't or didn't want to fold
9178 (the latter happen if there's an RTL instruction available). */
9179
static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has a direct instruction for this classification,
     prefer RTL expansion over the generic fold below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the mode's largest finite value as a hex
	   float string, parsed back into R for the comparison.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMIN is the smallest normalized value, 0x1p(emin-1).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* ARG is evaluated once; both comparisons reuse the saved
	   fabs(x).  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9264
9265 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9266 ARG is the argument for the call. */
9267
static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  /* TYPE is the (integer) return type of the classification builtin.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode, isinf is constant zero.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* Evaluate ARG once; both calls reuse the saved value.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 before combining them.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the mode, isnan is constant zero.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) is equivalent to the self-unordered test x != x.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9357
9358 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9359 This builtin will generate code to return the appropriate floating
9360 point classification depending on the value of the floating point
9361 number passed in. The possible return values must be supplied as
9362 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9363 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9364 one floating point argument which is "type generic". */
9365
static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work with fabs(x) so one saved value serves all comparisons.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain is built inside-out, innermost test first.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of the mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR is true for non-NaN, so NaN takes the else arm.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9425
9426 /* Fold a call to an unordered comparison function such as
9427 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9428 being called and ARG0 and ARG1 are the arguments for the call.
9429 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9430 the opposite of the desired result. UNORDERED_CODE is used
9431 for modes that can hold NaNs and ORDERED_CODE is used for
9432 the rest. */
9433
static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* NOTE(review): if neither operand is a REAL_TYPE, cmp_type stays
     NULL_TREE here — presumably the front end guarantees at least one
     real operand before this point; verify against callers.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* Without NaNs the operands are never unordered: fold to 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Build the INVERSE comparison and negate it, so a NaN operand
     yields false as the unordered builtins require.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9475
9476 /* Fold a call to built-in function FNDECL with 0 arguments.
9477 IGNORE is true if the result of the function call is ignored. This
9478 function returns NULL_TREE if no simplification was possible. */
9479
9480 static tree
9481 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9482 {
9483 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9484 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9485 switch (fcode)
9486 {
9487 CASE_FLT_FN (BUILT_IN_INF):
9488 case BUILT_IN_INFD32:
9489 case BUILT_IN_INFD64:
9490 case BUILT_IN_INFD128:
9491 return fold_builtin_inf (loc, type, true);
9492
9493 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9494 return fold_builtin_inf (loc, type, false);
9495
9496 case BUILT_IN_CLASSIFY_TYPE:
9497 return fold_builtin_classify_type (NULL_TREE);
9498
9499 default:
9500 break;
9501 }
9502 return NULL_TREE;
9503 }
9504
9505 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9506 IGNORE is true if the result of the function call is ignored. This
9507 function returns NULL_TREE if no simplification was possible. */
9508
9509 static tree
9510 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9511 {
9512 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9513 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9514 switch (fcode)
9515 {
9516 case BUILT_IN_CONSTANT_P:
9517 {
9518 tree val = fold_builtin_constant_p (arg0);
9519
9520 /* Gimplification will pull the CALL_EXPR for the builtin out of
9521 an if condition. When not optimizing, we'll not CSE it back.
9522 To avoid link error types of regressions, return false now. */
9523 if (!val && !optimize)
9524 val = integer_zero_node;
9525
9526 return val;
9527 }
9528
9529 case BUILT_IN_CLASSIFY_TYPE:
9530 return fold_builtin_classify_type (arg0);
9531
9532 case BUILT_IN_STRLEN:
9533 return fold_builtin_strlen (loc, type, arg0);
9534
9535 CASE_FLT_FN (BUILT_IN_FABS):
9536 return fold_builtin_fabs (loc, arg0, type);
9537
9538 case BUILT_IN_ABS:
9539 case BUILT_IN_LABS:
9540 case BUILT_IN_LLABS:
9541 case BUILT_IN_IMAXABS:
9542 return fold_builtin_abs (loc, arg0, type);
9543
9544 CASE_FLT_FN (BUILT_IN_CONJ):
9545 if (validate_arg (arg0, COMPLEX_TYPE)
9546 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9547 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9548 break;
9549
9550 CASE_FLT_FN (BUILT_IN_CREAL):
9551 if (validate_arg (arg0, COMPLEX_TYPE)
9552 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9553 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9554 break;
9555
9556 CASE_FLT_FN (BUILT_IN_CIMAG):
9557 if (validate_arg (arg0, COMPLEX_TYPE)
9558 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9559 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9560 break;
9561
9562 CASE_FLT_FN (BUILT_IN_CCOS):
9563 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9564
9565 CASE_FLT_FN (BUILT_IN_CCOSH):
9566 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9567
9568 CASE_FLT_FN (BUILT_IN_CPROJ):
9569 return fold_builtin_cproj(loc, arg0, type);
9570
9571 CASE_FLT_FN (BUILT_IN_CSIN):
9572 if (validate_arg (arg0, COMPLEX_TYPE)
9573 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9574 return do_mpc_arg1 (arg0, type, mpc_sin);
9575 break;
9576
9577 CASE_FLT_FN (BUILT_IN_CSINH):
9578 if (validate_arg (arg0, COMPLEX_TYPE)
9579 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9580 return do_mpc_arg1 (arg0, type, mpc_sinh);
9581 break;
9582
9583 CASE_FLT_FN (BUILT_IN_CTAN):
9584 if (validate_arg (arg0, COMPLEX_TYPE)
9585 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9586 return do_mpc_arg1 (arg0, type, mpc_tan);
9587 break;
9588
9589 CASE_FLT_FN (BUILT_IN_CTANH):
9590 if (validate_arg (arg0, COMPLEX_TYPE)
9591 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9592 return do_mpc_arg1 (arg0, type, mpc_tanh);
9593 break;
9594
9595 CASE_FLT_FN (BUILT_IN_CLOG):
9596 if (validate_arg (arg0, COMPLEX_TYPE)
9597 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9598 return do_mpc_arg1 (arg0, type, mpc_log);
9599 break;
9600
9601 CASE_FLT_FN (BUILT_IN_CSQRT):
9602 if (validate_arg (arg0, COMPLEX_TYPE)
9603 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9604 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9605 break;
9606
9607 CASE_FLT_FN (BUILT_IN_CASIN):
9608 if (validate_arg (arg0, COMPLEX_TYPE)
9609 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9610 return do_mpc_arg1 (arg0, type, mpc_asin);
9611 break;
9612
9613 CASE_FLT_FN (BUILT_IN_CACOS):
9614 if (validate_arg (arg0, COMPLEX_TYPE)
9615 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9616 return do_mpc_arg1 (arg0, type, mpc_acos);
9617 break;
9618
9619 CASE_FLT_FN (BUILT_IN_CATAN):
9620 if (validate_arg (arg0, COMPLEX_TYPE)
9621 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9622 return do_mpc_arg1 (arg0, type, mpc_atan);
9623 break;
9624
9625 CASE_FLT_FN (BUILT_IN_CASINH):
9626 if (validate_arg (arg0, COMPLEX_TYPE)
9627 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9628 return do_mpc_arg1 (arg0, type, mpc_asinh);
9629 break;
9630
9631 CASE_FLT_FN (BUILT_IN_CACOSH):
9632 if (validate_arg (arg0, COMPLEX_TYPE)
9633 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9634 return do_mpc_arg1 (arg0, type, mpc_acosh);
9635 break;
9636
9637 CASE_FLT_FN (BUILT_IN_CATANH):
9638 if (validate_arg (arg0, COMPLEX_TYPE)
9639 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9640 return do_mpc_arg1 (arg0, type, mpc_atanh);
9641 break;
9642
9643 CASE_FLT_FN (BUILT_IN_CABS):
9644 return fold_builtin_cabs (loc, arg0, type, fndecl);
9645
9646 CASE_FLT_FN (BUILT_IN_CARG):
9647 return fold_builtin_carg (loc, arg0, type);
9648
9649 CASE_FLT_FN (BUILT_IN_SQRT):
9650 return fold_builtin_sqrt (loc, arg0, type);
9651
9652 CASE_FLT_FN (BUILT_IN_CBRT):
9653 return fold_builtin_cbrt (loc, arg0, type);
9654
9655 CASE_FLT_FN (BUILT_IN_ASIN):
9656 if (validate_arg (arg0, REAL_TYPE))
9657 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9658 &dconstm1, &dconst1, true);
9659 break;
9660
9661 CASE_FLT_FN (BUILT_IN_ACOS):
9662 if (validate_arg (arg0, REAL_TYPE))
9663 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9664 &dconstm1, &dconst1, true);
9665 break;
9666
9667 CASE_FLT_FN (BUILT_IN_ATAN):
9668 if (validate_arg (arg0, REAL_TYPE))
9669 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9670 break;
9671
9672 CASE_FLT_FN (BUILT_IN_ASINH):
9673 if (validate_arg (arg0, REAL_TYPE))
9674 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9675 break;
9676
9677 CASE_FLT_FN (BUILT_IN_ACOSH):
9678 if (validate_arg (arg0, REAL_TYPE))
9679 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9680 &dconst1, NULL, true);
9681 break;
9682
9683 CASE_FLT_FN (BUILT_IN_ATANH):
9684 if (validate_arg (arg0, REAL_TYPE))
9685 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9686 &dconstm1, &dconst1, false);
9687 break;
9688
9689 CASE_FLT_FN (BUILT_IN_SIN):
9690 if (validate_arg (arg0, REAL_TYPE))
9691 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9692 break;
9693
9694 CASE_FLT_FN (BUILT_IN_COS):
9695 return fold_builtin_cos (loc, arg0, type, fndecl);
9696
9697 CASE_FLT_FN (BUILT_IN_TAN):
9698 return fold_builtin_tan (arg0, type);
9699
9700 CASE_FLT_FN (BUILT_IN_CEXP):
9701 return fold_builtin_cexp (loc, arg0, type);
9702
9703 CASE_FLT_FN (BUILT_IN_CEXPI):
9704 if (validate_arg (arg0, REAL_TYPE))
9705 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9706 break;
9707
9708 CASE_FLT_FN (BUILT_IN_SINH):
9709 if (validate_arg (arg0, REAL_TYPE))
9710 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9711 break;
9712
9713 CASE_FLT_FN (BUILT_IN_COSH):
9714 return fold_builtin_cosh (loc, arg0, type, fndecl);
9715
9716 CASE_FLT_FN (BUILT_IN_TANH):
9717 if (validate_arg (arg0, REAL_TYPE))
9718 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9719 break;
9720
9721 CASE_FLT_FN (BUILT_IN_ERF):
9722 if (validate_arg (arg0, REAL_TYPE))
9723 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9724 break;
9725
9726 CASE_FLT_FN (BUILT_IN_ERFC):
9727 if (validate_arg (arg0, REAL_TYPE))
9728 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9729 break;
9730
9731 CASE_FLT_FN (BUILT_IN_TGAMMA):
9732 if (validate_arg (arg0, REAL_TYPE))
9733 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9734 break;
9735
9736 CASE_FLT_FN (BUILT_IN_EXP):
9737 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9738
9739 CASE_FLT_FN (BUILT_IN_EXP2):
9740 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9741
9742 CASE_FLT_FN (BUILT_IN_EXP10):
9743 CASE_FLT_FN (BUILT_IN_POW10):
9744 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9745
9746 CASE_FLT_FN (BUILT_IN_EXPM1):
9747 if (validate_arg (arg0, REAL_TYPE))
9748 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9749 break;
9750
9751 CASE_FLT_FN (BUILT_IN_LOG):
9752 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9753
9754 CASE_FLT_FN (BUILT_IN_LOG2):
9755 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9756
9757 CASE_FLT_FN (BUILT_IN_LOG10):
9758 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9759
9760 CASE_FLT_FN (BUILT_IN_LOG1P):
9761 if (validate_arg (arg0, REAL_TYPE))
9762 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9763 &dconstm1, NULL, false);
9764 break;
9765
9766 CASE_FLT_FN (BUILT_IN_J0):
9767 if (validate_arg (arg0, REAL_TYPE))
9768 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9769 NULL, NULL, 0);
9770 break;
9771
9772 CASE_FLT_FN (BUILT_IN_J1):
9773 if (validate_arg (arg0, REAL_TYPE))
9774 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9775 NULL, NULL, 0);
9776 break;
9777
9778 CASE_FLT_FN (BUILT_IN_Y0):
9779 if (validate_arg (arg0, REAL_TYPE))
9780 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9781 &dconst0, NULL, false);
9782 break;
9783
9784 CASE_FLT_FN (BUILT_IN_Y1):
9785 if (validate_arg (arg0, REAL_TYPE))
9786 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9787 &dconst0, NULL, false);
9788 break;
9789
9790 CASE_FLT_FN (BUILT_IN_NAN):
9791 case BUILT_IN_NAND32:
9792 case BUILT_IN_NAND64:
9793 case BUILT_IN_NAND128:
9794 return fold_builtin_nan (arg0, type, true);
9795
9796 CASE_FLT_FN (BUILT_IN_NANS):
9797 return fold_builtin_nan (arg0, type, false);
9798
9799 CASE_FLT_FN (BUILT_IN_FLOOR):
9800 return fold_builtin_floor (loc, fndecl, arg0);
9801
9802 CASE_FLT_FN (BUILT_IN_CEIL):
9803 return fold_builtin_ceil (loc, fndecl, arg0);
9804
9805 CASE_FLT_FN (BUILT_IN_TRUNC):
9806 return fold_builtin_trunc (loc, fndecl, arg0);
9807
9808 CASE_FLT_FN (BUILT_IN_ROUND):
9809 return fold_builtin_round (loc, fndecl, arg0);
9810
9811 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9812 CASE_FLT_FN (BUILT_IN_RINT):
9813 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9814
9815 CASE_FLT_FN (BUILT_IN_ICEIL):
9816 CASE_FLT_FN (BUILT_IN_LCEIL):
9817 CASE_FLT_FN (BUILT_IN_LLCEIL):
9818 CASE_FLT_FN (BUILT_IN_LFLOOR):
9819 CASE_FLT_FN (BUILT_IN_IFLOOR):
9820 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9821 CASE_FLT_FN (BUILT_IN_IROUND):
9822 CASE_FLT_FN (BUILT_IN_LROUND):
9823 CASE_FLT_FN (BUILT_IN_LLROUND):
9824 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9825
9826 CASE_FLT_FN (BUILT_IN_IRINT):
9827 CASE_FLT_FN (BUILT_IN_LRINT):
9828 CASE_FLT_FN (BUILT_IN_LLRINT):
9829 return fold_fixed_mathfn (loc, fndecl, arg0);
9830
9831 case BUILT_IN_BSWAP32:
9832 case BUILT_IN_BSWAP64:
9833 return fold_builtin_bswap (fndecl, arg0);
9834
9835 CASE_INT_FN (BUILT_IN_FFS):
9836 CASE_INT_FN (BUILT_IN_CLZ):
9837 CASE_INT_FN (BUILT_IN_CTZ):
9838 CASE_INT_FN (BUILT_IN_CLRSB):
9839 CASE_INT_FN (BUILT_IN_POPCOUNT):
9840 CASE_INT_FN (BUILT_IN_PARITY):
9841 return fold_builtin_bitop (fndecl, arg0);
9842
9843 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9844 return fold_builtin_signbit (loc, arg0, type);
9845
9846 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9847 return fold_builtin_significand (loc, arg0, type);
9848
9849 CASE_FLT_FN (BUILT_IN_ILOGB):
9850 CASE_FLT_FN (BUILT_IN_LOGB):
9851 return fold_builtin_logb (loc, arg0, type);
9852
9853 case BUILT_IN_ISASCII:
9854 return fold_builtin_isascii (loc, arg0);
9855
9856 case BUILT_IN_TOASCII:
9857 return fold_builtin_toascii (loc, arg0);
9858
9859 case BUILT_IN_ISDIGIT:
9860 return fold_builtin_isdigit (loc, arg0);
9861
9862 CASE_FLT_FN (BUILT_IN_FINITE):
9863 case BUILT_IN_FINITED32:
9864 case BUILT_IN_FINITED64:
9865 case BUILT_IN_FINITED128:
9866 case BUILT_IN_ISFINITE:
9867 {
9868 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9869 if (ret)
9870 return ret;
9871 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9872 }
9873
9874 CASE_FLT_FN (BUILT_IN_ISINF):
9875 case BUILT_IN_ISINFD32:
9876 case BUILT_IN_ISINFD64:
9877 case BUILT_IN_ISINFD128:
9878 {
9879 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9880 if (ret)
9881 return ret;
9882 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9883 }
9884
9885 case BUILT_IN_ISNORMAL:
9886 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9887
9888 case BUILT_IN_ISINF_SIGN:
9889 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9890
9891 CASE_FLT_FN (BUILT_IN_ISNAN):
9892 case BUILT_IN_ISNAND32:
9893 case BUILT_IN_ISNAND64:
9894 case BUILT_IN_ISNAND128:
9895 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9896
9897 case BUILT_IN_PRINTF:
9898 case BUILT_IN_PRINTF_UNLOCKED:
9899 case BUILT_IN_VPRINTF:
9900 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9901
9902 case BUILT_IN_FREE:
9903 if (integer_zerop (arg0))
9904 return build_empty_stmt (loc);
9905 break;
9906
9907 default:
9908 break;
9909 }
9910
9911 return NULL_TREE;
9912
9913 }
9914
9915 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9916 IGNORE is true if the result of the function call is ignored. This
9917 function returns NULL_TREE if no simplification was possible. */
9918
9919 static tree
9920 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9921 {
9922 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9923 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9924
9925 switch (fcode)
9926 {
9927 CASE_FLT_FN (BUILT_IN_JN):
9928 if (validate_arg (arg0, INTEGER_TYPE)
9929 && validate_arg (arg1, REAL_TYPE))
9930 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9931 break;
9932
9933 CASE_FLT_FN (BUILT_IN_YN):
9934 if (validate_arg (arg0, INTEGER_TYPE)
9935 && validate_arg (arg1, REAL_TYPE))
9936 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9937 &dconst0, false);
9938 break;
9939
9940 CASE_FLT_FN (BUILT_IN_DREM):
9941 CASE_FLT_FN (BUILT_IN_REMAINDER):
9942 if (validate_arg (arg0, REAL_TYPE)
9943 && validate_arg(arg1, REAL_TYPE))
9944 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9945 break;
9946
9947 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9948 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9949 if (validate_arg (arg0, REAL_TYPE)
9950 && validate_arg(arg1, POINTER_TYPE))
9951 return do_mpfr_lgamma_r (arg0, arg1, type);
9952 break;
9953
9954 CASE_FLT_FN (BUILT_IN_ATAN2):
9955 if (validate_arg (arg0, REAL_TYPE)
9956 && validate_arg(arg1, REAL_TYPE))
9957 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9958 break;
9959
9960 CASE_FLT_FN (BUILT_IN_FDIM):
9961 if (validate_arg (arg0, REAL_TYPE)
9962 && validate_arg(arg1, REAL_TYPE))
9963 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9964 break;
9965
9966 CASE_FLT_FN (BUILT_IN_HYPOT):
9967 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9968
9969 CASE_FLT_FN (BUILT_IN_CPOW):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9972 && validate_arg (arg1, COMPLEX_TYPE)
9973 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9974 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9975 break;
9976
9977 CASE_FLT_FN (BUILT_IN_LDEXP):
9978 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9979 CASE_FLT_FN (BUILT_IN_SCALBN):
9980 CASE_FLT_FN (BUILT_IN_SCALBLN):
9981 return fold_builtin_load_exponent (loc, arg0, arg1,
9982 type, /*ldexp=*/false);
9983
9984 CASE_FLT_FN (BUILT_IN_FREXP):
9985 return fold_builtin_frexp (loc, arg0, arg1, type);
9986
9987 CASE_FLT_FN (BUILT_IN_MODF):
9988 return fold_builtin_modf (loc, arg0, arg1, type);
9989
9990 case BUILT_IN_BZERO:
9991 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9992
9993 case BUILT_IN_FPUTS:
9994 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9995
9996 case BUILT_IN_FPUTS_UNLOCKED:
9997 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9998
9999 case BUILT_IN_STRSTR:
10000 return fold_builtin_strstr (loc, arg0, arg1, type);
10001
10002 case BUILT_IN_STRCAT:
10003 return fold_builtin_strcat (loc, arg0, arg1);
10004
10005 case BUILT_IN_STRSPN:
10006 return fold_builtin_strspn (loc, arg0, arg1);
10007
10008 case BUILT_IN_STRCSPN:
10009 return fold_builtin_strcspn (loc, arg0, arg1);
10010
10011 case BUILT_IN_STRCHR:
10012 case BUILT_IN_INDEX:
10013 return fold_builtin_strchr (loc, arg0, arg1, type);
10014
10015 case BUILT_IN_STRRCHR:
10016 case BUILT_IN_RINDEX:
10017 return fold_builtin_strrchr (loc, arg0, arg1, type);
10018
10019 case BUILT_IN_STRCPY:
10020 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10021
10022 case BUILT_IN_STPCPY:
10023 if (ignore)
10024 {
10025 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10026 if (!fn)
10027 break;
10028
10029 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10030 }
10031 else
10032 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10033 break;
10034
10035 case BUILT_IN_STRCMP:
10036 return fold_builtin_strcmp (loc, arg0, arg1);
10037
10038 case BUILT_IN_STRPBRK:
10039 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10040
10041 case BUILT_IN_EXPECT:
10042 return fold_builtin_expect (loc, arg0, arg1);
10043
10044 CASE_FLT_FN (BUILT_IN_POW):
10045 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10046
10047 CASE_FLT_FN (BUILT_IN_POWI):
10048 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10049
10050 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10051 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10052
10053 CASE_FLT_FN (BUILT_IN_FMIN):
10054 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10055
10056 CASE_FLT_FN (BUILT_IN_FMAX):
10057 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10058
10059 case BUILT_IN_ISGREATER:
10060 return fold_builtin_unordered_cmp (loc, fndecl,
10061 arg0, arg1, UNLE_EXPR, LE_EXPR);
10062 case BUILT_IN_ISGREATEREQUAL:
10063 return fold_builtin_unordered_cmp (loc, fndecl,
10064 arg0, arg1, UNLT_EXPR, LT_EXPR);
10065 case BUILT_IN_ISLESS:
10066 return fold_builtin_unordered_cmp (loc, fndecl,
10067 arg0, arg1, UNGE_EXPR, GE_EXPR);
10068 case BUILT_IN_ISLESSEQUAL:
10069 return fold_builtin_unordered_cmp (loc, fndecl,
10070 arg0, arg1, UNGT_EXPR, GT_EXPR);
10071 case BUILT_IN_ISLESSGREATER:
10072 return fold_builtin_unordered_cmp (loc, fndecl,
10073 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10074 case BUILT_IN_ISUNORDERED:
10075 return fold_builtin_unordered_cmp (loc, fndecl,
10076 arg0, arg1, UNORDERED_EXPR,
10077 NOP_EXPR);
10078
10079 /* We do the folding for va_start in the expander. */
10080 case BUILT_IN_VA_START:
10081 break;
10082
10083 case BUILT_IN_SPRINTF:
10084 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10085
10086 case BUILT_IN_OBJECT_SIZE:
10087 return fold_builtin_object_size (arg0, arg1);
10088
10089 case BUILT_IN_PRINTF:
10090 case BUILT_IN_PRINTF_UNLOCKED:
10091 case BUILT_IN_VPRINTF:
10092 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10093
10094 case BUILT_IN_PRINTF_CHK:
10095 case BUILT_IN_VPRINTF_CHK:
10096 if (!validate_arg (arg0, INTEGER_TYPE)
10097 || TREE_SIDE_EFFECTS (arg0))
10098 return NULL_TREE;
10099 else
10100 return fold_builtin_printf (loc, fndecl,
10101 arg1, NULL_TREE, ignore, fcode);
10102 break;
10103
10104 case BUILT_IN_FPRINTF:
10105 case BUILT_IN_FPRINTF_UNLOCKED:
10106 case BUILT_IN_VFPRINTF:
10107 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10108 ignore, fcode);
10109
10110 default:
10111 break;
10112 }
10113 return NULL_TREE;
10114 }
10115
10116 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10117 and ARG2. IGNORE is true if the result of the function call is ignored.
10118 This function returns NULL_TREE if no simplification was possible. */
10119
10120 static tree
10121 fold_builtin_3 (location_t loc, tree fndecl,
10122 tree arg0, tree arg1, tree arg2, bool ignore)
10123 {
10124 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10125 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10126 switch (fcode)
10127 {
10128
10129 CASE_FLT_FN (BUILT_IN_SINCOS):
10130 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10131
10132 CASE_FLT_FN (BUILT_IN_FMA):
10133 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10134 break;
10135
10136 CASE_FLT_FN (BUILT_IN_REMQUO):
10137 if (validate_arg (arg0, REAL_TYPE)
10138 && validate_arg(arg1, REAL_TYPE)
10139 && validate_arg(arg2, POINTER_TYPE))
10140 return do_mpfr_remquo (arg0, arg1, arg2);
10141 break;
10142
10143 case BUILT_IN_MEMSET:
10144 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10145
10146 case BUILT_IN_BCOPY:
10147 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10148 void_type_node, true, /*endp=*/3);
10149
10150 case BUILT_IN_MEMCPY:
10151 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10152 type, ignore, /*endp=*/0);
10153
10154 case BUILT_IN_MEMPCPY:
10155 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10156 type, ignore, /*endp=*/1);
10157
10158 case BUILT_IN_MEMMOVE:
10159 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10160 type, ignore, /*endp=*/3);
10161
10162 case BUILT_IN_STRNCAT:
10163 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10164
10165 case BUILT_IN_STRNCPY:
10166 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10167
10168 case BUILT_IN_STRNCMP:
10169 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10170
10171 case BUILT_IN_MEMCHR:
10172 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10173
10174 case BUILT_IN_BCMP:
10175 case BUILT_IN_MEMCMP:
10176 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10177
10178 case BUILT_IN_SPRINTF:
10179 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10180
10181 case BUILT_IN_SNPRINTF:
10182 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10183
10184 case BUILT_IN_STRCPY_CHK:
10185 case BUILT_IN_STPCPY_CHK:
10186 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10187 ignore, fcode);
10188
10189 case BUILT_IN_STRCAT_CHK:
10190 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10191
10192 case BUILT_IN_PRINTF_CHK:
10193 case BUILT_IN_VPRINTF_CHK:
10194 if (!validate_arg (arg0, INTEGER_TYPE)
10195 || TREE_SIDE_EFFECTS (arg0))
10196 return NULL_TREE;
10197 else
10198 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10199 break;
10200
10201 case BUILT_IN_FPRINTF:
10202 case BUILT_IN_FPRINTF_UNLOCKED:
10203 case BUILT_IN_VFPRINTF:
10204 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10205 ignore, fcode);
10206
10207 case BUILT_IN_FPRINTF_CHK:
10208 case BUILT_IN_VFPRINTF_CHK:
10209 if (!validate_arg (arg1, INTEGER_TYPE)
10210 || TREE_SIDE_EFFECTS (arg1))
10211 return NULL_TREE;
10212 else
10213 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10214 ignore, fcode);
10215
10216 default:
10217 break;
10218 }
10219 return NULL_TREE;
10220 }
10221
10222 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10223 ARG2, and ARG3. IGNORE is true if the result of the function call is
10224 ignored. This function returns NULL_TREE if no simplification was
10225 possible. */
10226
10227 static tree
10228 fold_builtin_4 (location_t loc, tree fndecl,
10229 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10230 {
10231 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10232
10233 switch (fcode)
10234 {
10235 case BUILT_IN_MEMCPY_CHK:
10236 case BUILT_IN_MEMPCPY_CHK:
10237 case BUILT_IN_MEMMOVE_CHK:
10238 case BUILT_IN_MEMSET_CHK:
10239 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10240 NULL_TREE, ignore,
10241 DECL_FUNCTION_CODE (fndecl));
10242
10243 case BUILT_IN_STRNCPY_CHK:
10244 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10245
10246 case BUILT_IN_STRNCAT_CHK:
10247 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10248
10249 case BUILT_IN_SNPRINTF:
10250 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10251
10252 case BUILT_IN_FPRINTF_CHK:
10253 case BUILT_IN_VFPRINTF_CHK:
10254 if (!validate_arg (arg1, INTEGER_TYPE)
10255 || TREE_SIDE_EFFECTS (arg1))
10256 return NULL_TREE;
10257 else
10258 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10259 ignore, fcode);
10260 break;
10261
10262 default:
10263 break;
10264 }
10265 return NULL_TREE;
10266 }
10267
10268 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10269 arguments, where NARGS <= 4. IGNORE is true if the result of the
10270 function call is ignored. This function returns NULL_TREE if no
10271 simplification was possible. Note that this only folds builtins with
10272 fixed argument patterns. Foldings that do varargs-to-varargs
10273 transformations, or that match calls with more than 4 arguments,
10274 need to be handled with fold_builtin_varargs instead. */
10275
10276 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10277
10278 static tree
10279 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10280 {
10281 tree ret = NULL_TREE;
10282
10283 switch (nargs)
10284 {
10285 case 0:
10286 ret = fold_builtin_0 (loc, fndecl, ignore);
10287 break;
10288 case 1:
10289 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10290 break;
10291 case 2:
10292 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10293 break;
10294 case 3:
10295 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10296 break;
10297 case 4:
10298 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10299 ignore);
10300 break;
10301 default:
10302 break;
10303 }
10304 if (ret)
10305 {
10306 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10307 SET_EXPR_LOCATION (ret, loc);
10308 TREE_NO_WARNING (ret) = 1;
10309 return ret;
10310 }
10311 return NULL_TREE;
10312 }
10313
10314 /* Builtins with folding operations that operate on "..." arguments
10315 need special handling; we need to store the arguments in a convenient
10316 data structure before attempting any folding. Fortunately there are
10317 only a few builtins that fall into this category. FNDECL is the
10318 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10319 result of the function call is ignored. */
10320
10321 static tree
10322 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10323 bool ignore ATTRIBUTE_UNUSED)
10324 {
10325 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10326 tree ret = NULL_TREE;
10327
10328 switch (fcode)
10329 {
10330 case BUILT_IN_SPRINTF_CHK:
10331 case BUILT_IN_VSPRINTF_CHK:
10332 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10333 break;
10334
10335 case BUILT_IN_SNPRINTF_CHK:
10336 case BUILT_IN_VSNPRINTF_CHK:
10337 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10338 break;
10339
10340 case BUILT_IN_FPCLASSIFY:
10341 ret = fold_builtin_fpclassify (loc, exp);
10342 break;
10343
10344 default:
10345 break;
10346 }
10347 if (ret)
10348 {
10349 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10350 SET_EXPR_LOCATION (ret, loc);
10351 TREE_NO_WARNING (ret) = 1;
10352 return ret;
10353 }
10354 return NULL_TREE;
10355 }
10356
10357 /* Return true if FNDECL shouldn't be folded right now.
10358 If a built-in function has an inline attribute always_inline
10359 wrapper, defer folding it after always_inline functions have
10360 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10361 might not be performed. */
10362
10363 static bool
10364 avoid_folding_inline_builtin (tree fndecl)
10365 {
10366 return (DECL_DECLARED_INLINE_P (fndecl)
10367 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10368 && cfun
10369 && !cfun->always_inline_functions_inlined
10370 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10371 }
10372
10373 /* A wrapper function for builtin folding that prevents warnings for
10374 "statement without effect" and the like, caused by removing the
10375 call node earlier than the warning is generated. */
10376
10377 tree
10378 fold_call_expr (location_t loc, tree exp, bool ignore)
10379 {
10380 tree ret = NULL_TREE;
10381 tree fndecl = get_callee_fndecl (exp);
10382 if (fndecl
10383 && TREE_CODE (fndecl) == FUNCTION_DECL
10384 && DECL_BUILT_IN (fndecl)
10385 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10386 yet. Defer folding until we see all the arguments
10387 (after inlining). */
10388 && !CALL_EXPR_VA_ARG_PACK (exp))
10389 {
10390 int nargs = call_expr_nargs (exp);
10391
10392 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10393 instead last argument is __builtin_va_arg_pack (). Defer folding
10394 even in that case, until arguments are finalized. */
10395 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10396 {
10397 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10398 if (fndecl2
10399 && TREE_CODE (fndecl2) == FUNCTION_DECL
10400 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10401 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10402 return NULL_TREE;
10403 }
10404
10405 if (avoid_folding_inline_builtin (fndecl))
10406 return NULL_TREE;
10407
10408 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10409 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10410 CALL_EXPR_ARGP (exp), ignore);
10411 else
10412 {
10413 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10414 {
10415 tree *args = CALL_EXPR_ARGP (exp);
10416 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10417 }
10418 if (!ret)
10419 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10420 if (ret)
10421 return ret;
10422 }
10423 }
10424 return NULL_TREE;
10425 }
10426
10427 /* Conveniently construct a function call expression. FNDECL names the
10428 function to be called and N arguments are passed in the array
10429 ARGARRAY. */
10430
10431 tree
10432 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10433 {
10434 tree fntype = TREE_TYPE (fndecl);
10435 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10436
10437 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10438 }
10439
10440 /* Conveniently construct a function call expression. FNDECL names the
10441 function to be called and the arguments are passed in the vector
10442 VEC. */
10443
10444 tree
10445 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10446 {
10447 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10448 VEC_address (tree, vec));
10449 }
10450
10451
10452 /* Conveniently construct a function call expression. FNDECL names the
10453 function to be called, N is the number of arguments, and the "..."
10454 parameters are the argument expressions. */
10455
10456 tree
10457 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10458 {
10459 va_list ap;
10460 tree *argarray = XALLOCAVEC (tree, n);
10461 int i;
10462
10463 va_start (ap, n);
10464 for (i = 0; i < n; i++)
10465 argarray[i] = va_arg (ap, tree);
10466 va_end (ap);
10467 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10468 }
10469
10470 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10471 varargs macros aren't supported by all bootstrap compilers. */
10472
10473 tree
10474 build_call_expr (tree fndecl, int n, ...)
10475 {
10476 va_list ap;
10477 tree *argarray = XALLOCAVEC (tree, n);
10478 int i;
10479
10480 va_start (ap, n);
10481 for (i = 0; i < n; i++)
10482 argarray[i] = va_arg (ap, tree);
10483 va_end (ap);
10484 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10485 }
10486
10487 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10488 N arguments are passed in the array ARGARRAY. */
10489
10490 tree
10491 fold_builtin_call_array (location_t loc, tree type,
10492 tree fn,
10493 int n,
10494 tree *argarray)
10495 {
10496 tree ret = NULL_TREE;
10497 tree exp;
10498
/* Only direct calls to builtins are candidates for folding; anything
   else becomes a plain CALL_EXPR at the bottom of the function.  */
10499 if (TREE_CODE (fn) == ADDR_EXPR)
10500 {
10501 tree fndecl = TREE_OPERAND (fn, 0);
10502 if (TREE_CODE (fndecl) == FUNCTION_DECL
10503 && DECL_BUILT_IN (fndecl))
10504 {
10505 /* If last argument is __builtin_va_arg_pack (), arguments to this
10506 function are not finalized yet. Defer folding until they are. */
10507 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10508 {
10509 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10510 if (fndecl2
10511 && TREE_CODE (fndecl2) == FUNCTION_DECL
10512 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10513 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10514 return build_call_array_loc (loc, type, fn, n, argarray);
10515 }
/* Also defer while an always_inline wrapper of this builtin remains
   uninlined; see avoid_folding_inline_builtin.  */
10516 if (avoid_folding_inline_builtin (fndecl))
10517 return build_call_array_loc (loc, type, fn, n, argarray);
/* Machine-specific builtins are folded by the target hook; if it
   declines, emit the plain call.  */
10518 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10519 {
10520 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10521 if (ret)
10522 return ret;
10523
10524 return build_call_array_loc (loc, type, fn, n, argarray);
10525 }
10526 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10527 {
10528 /* First try the transformations that don't require consing up
10529 an exp. */
10530 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10531 if (ret)
10532 return ret;
10533 }
10534
10535 /* If we got this far, we need to build an exp. */
10536 exp = build_call_array_loc (loc, type, fn, n, argarray);
10537 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10538 return ret ? ret : exp;
10539 }
10540 }
10541
10542 return build_call_array_loc (loc, type, fn, n, argarray);
10543 }
10544
10545 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10546 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10547 of arguments in ARGS to be omitted. OLDNARGS is the number of
10548 elements in ARGS. */
10549
10550 static tree
10551 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10552 int skip, tree fndecl, int n, va_list newargs)
10553 {
10554 int nargs = oldnargs - skip + n;
10555 tree *buffer;
10556
10557 if (n > 0)
10558 {
10559 int i, j;
10560
10561 buffer = XALLOCAVEC (tree, nargs);
10562 for (i = 0; i < n; i++)
10563 buffer[i] = va_arg (newargs, tree);
10564 for (j = skip; j < oldnargs; j++, i++)
10565 buffer[i] = args[j];
10566 }
10567 else
10568 buffer = args + skip;
10569
10570 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10571 }
10572
10573 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10574 list ARGS along with N new arguments specified as the "..."
10575 parameters. SKIP is the number of arguments in ARGS to be omitted.
10576 OLDNARGS is the number of elements in ARGS. */
10577
10578 static tree
10579 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10580 int skip, tree fndecl, int n, ...)
10581 {
10582 va_list ap;
10583 tree t;
10584
10585 va_start (ap, n);
10586 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10587 va_end (ap);
10588
10589 return t;
10590 }
10591
10592 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10593 along with N new arguments specified as the "..." parameters. SKIP
10594 is the number of arguments in EXP to be omitted. This function is used
10595 to do varargs-to-varargs transformations. */
10596
10597 static tree
10598 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10599 {
10600 va_list ap;
10601 tree t;
10602
10603 va_start (ap, n);
10604 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10605 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10606 va_end (ap);
10607
10608 return t;
10609 }
10610
10611 /* Validate a single argument ARG against a tree code CODE representing
10612 a type. */
10613
10614 static bool
10615 validate_arg (const_tree arg, enum tree_code code)
10616 {
10617 if (!arg)
10618 return false;
10619 else if (code == POINTER_TYPE)
10620 return POINTER_TYPE_P (TREE_TYPE (arg));
10621 else if (code == INTEGER_TYPE)
10622 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10623 return code == TREE_CODE (TREE_TYPE (arg));
10624 }
10625
10626 /* This function validates the types of a function call argument list
10627 against a specified list of tree_codes. If the last specifier is a 0,
10628 that represents an ellipses, otherwise the last specifier must be a
10629 VOID_TYPE.
10630
10631 This is the GIMPLE version of validate_arglist. Eventually we want to
10632 completely convert builtins.c to work from GIMPLEs and the tree based
10633 validate_arglist will then be removed. */
10634
10635 bool
10636 validate_gimple_arglist (const_gimple call, ...)
10637 {
10638 enum tree_code code;
/* RES stays false unless a terminating specifier (0 or VOID_TYPE)
   declares the list valid before a mismatch is found.  */
10639 bool res = 0;
10640 va_list ap;
10641 const_tree arg;
10642 size_t i;
10643
10644 va_start (ap, call);
10645 i = 0;
10646
10647 do
10648 {
/* The tree code is promoted to int when passed through "...", so it
   must be read back as int and converted explicitly.  */
10649 code = (enum tree_code) va_arg (ap, int);
10650 switch (code)
10651 {
10652 case 0:
10653 /* This signifies an ellipses, any further arguments are all ok. */
10654 res = true;
10655 goto end;
10656 case VOID_TYPE:
10657 /* This signifies an endlink, if no arguments remain, return
10658 true, otherwise return false. */
10659 res = (i == gimple_call_num_args (call));
10660 goto end;
10661 default:
10662 /* If no parameters remain or the parameter's code does not
10663 match the specified code, return false. Otherwise continue
10664 checking any remaining arguments. */
10665 arg = gimple_call_arg (call, i++);
10666 if (!validate_arg (arg, code))
10667 goto end;
10668 break;
10669 }
10670 }
10671 while (1);
10672
10673 /* We need gotos here since we can only have one VA_CLOSE in a
10674 function. */
10675 end: ;
10676 va_end (ap);
10677
10678 return res;
10679 }
10680
10681 /* This function validates the types of a function call argument list
10682 against a specified list of tree_codes. If the last specifier is a 0,
10683 that represents an ellipses, otherwise the last specifier must be a
10684 VOID_TYPE. */
10685
10686 bool
10687 validate_arglist (const_tree callexpr, ...)
10688 {
10689 enum tree_code code;
/* RES stays false unless a terminating specifier (0 or VOID_TYPE)
   declares the list valid before a mismatch is found.  */
10690 bool res = 0;
10691 va_list ap;
10692 const_call_expr_arg_iterator iter;
10693 const_tree arg;
10694
10695 va_start (ap, callexpr);
10696 init_const_call_expr_arg_iterator (callexpr, &iter);
10697
10698 do
10699 {
/* The tree code is promoted to int when passed through "...", so it
   must be read back as int and converted explicitly.  */
10700 code = (enum tree_code) va_arg (ap, int);
10701 switch (code)
10702 {
10703 case 0:
10704 /* This signifies an ellipses, any further arguments are all ok. */
10705 res = true;
10706 goto end;
10707 case VOID_TYPE:
10708 /* This signifies an endlink, if no arguments remain, return
10709 true, otherwise return false. */
10710 res = !more_const_call_expr_args_p (&iter);
10711 goto end;
10712 default:
10713 /* If no parameters remain or the parameter's code does not
10714 match the specified code, return false. Otherwise continue
10715 checking any remaining arguments. */
10716 arg = next_const_call_expr_arg (&iter);
10717 if (!validate_arg (arg, code))
10718 goto end;
10719 break;
10720 }
10721 }
10722 while (1);
10723
10724 /* We need gotos here since we can only have one VA_CLOSE in a
10725 function. */
10726 end: ;
10727 va_end (ap);
10728
10729 return res;
10730 }
10731
10732 /* Default target-specific builtin expander that does nothing. */
10733
10734 rtx
10735 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10736 rtx target ATTRIBUTE_UNUSED,
10737 rtx subtarget ATTRIBUTE_UNUSED,
10738 enum machine_mode mode ATTRIBUTE_UNUSED,
10739 int ignore ATTRIBUTE_UNUSED)
10740 {
/* NULL_RTX signals that no target-specific expansion was performed.  */
10741 return NULL_RTX;
10742 }
10743
10744 /* Returns true is EXP represents data that would potentially reside
10745 in a readonly section. */
10746
10747 static bool
10748 readonly_data_expr (tree exp)
10749 {
10750 STRIP_NOPS (exp);
10751
10752 if (TREE_CODE (exp) != ADDR_EXPR)
10753 return false;
10754
10755 exp = get_base_address (TREE_OPERAND (exp, 0));
10756 if (!exp)
10757 return false;
10758
10759 /* Make sure we call decl_readonly_section only for trees it
10760 can handle (since it returns true for everything it doesn't
10761 understand). */
10762 if (TREE_CODE (exp) == STRING_CST
10763 || TREE_CODE (exp) == CONSTRUCTOR
10764 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10765 return decl_readonly_section (exp, 0);
10766 else
10767 return false;
10768 }
10769
10770 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10771 to the call, and TYPE is its return type.
10772
10773 Return NULL_TREE if no simplification was possible, otherwise return the
10774 simplified form of the call as a tree.
10775
10776 The simplified form may be a constant or other expression which
10777 computes the same value, but in a more efficient manner (including
10778 calls to other builtin functions).
10779
10780 The call may contain arguments which need to be evaluated, but
10781 which are not useful to determine the result of the call. In
10782 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10783 COMPOUND_EXPR will be an argument which must be evaluated.
10784 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10785 COMPOUND_EXPR in the chain will contain the tree for the simplified
10786 form of the builtin function call. */
10787
10788 static tree
10789 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10790 {
10791 if (!validate_arg (s1, POINTER_TYPE)
10792 || !validate_arg (s2, POINTER_TYPE))
10793 return NULL_TREE;
10794 else
10795 {
10796 tree fn;
10797 const char *p1, *p2;
10798
/* The needle must be a compile-time constant string for any folding
   below to apply.  */
10799 p2 = c_getstr (s2);
10800 if (p2 == NULL)
10801 return NULL_TREE;
10802
10803 p1 = c_getstr (s1);
10804 if (p1 != NULL)
10805 {
/* Both strings are constant: do the search at compile time with the
   host strstr.  */
10806 const char *r = strstr (p1, p2);
10807 tree tem;
10808
10809 if (r == NULL)
10810 return build_int_cst (TREE_TYPE (s1), 0);
10811
10812 /* Return an offset into the constant string argument. */
10813 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10814 return fold_convert_loc (loc, type, tem);
10815 }
10816
10817 /* The argument is const char *, and the result is char *, so we need
10818 a type conversion here to avoid a warning. */
10819 if (p2[0] == '\0')
10820 return fold_convert_loc (loc, type, s1);
10821
/* Beyond the empty-needle case, only a single-character needle can be
   folded (to strchr).  */
10822 if (p2[1] != '\0')
10823 return NULL_TREE;
10824
10825 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10826 if (!fn)
10827 return NULL_TREE;
10828
10829 /* New argument list transforming strstr(s1, s2) to
10830 strchr(s1, s2[0]). */
10831 return build_call_expr_loc (loc, fn, 2, s1,
10832 build_int_cst (integer_type_node, p2[0]));
10833 }
10834 }
10835
10836 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10837 the call, and TYPE is its return type.
10838
10839 Return NULL_TREE if no simplification was possible, otherwise return the
10840 simplified form of the call as a tree.
10841
10842 The simplified form may be a constant or other expression which
10843 computes the same value, but in a more efficient manner (including
10844 calls to other builtin functions).
10845
10846 The call may contain arguments which need to be evaluated, but
10847 which are not useful to determine the result of the call. In
10848 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10849 COMPOUND_EXPR will be an argument which must be evaluated.
10850 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10851 COMPOUND_EXPR in the chain will contain the tree for the simplified
10852 form of the builtin function call. */
10853
10854 static tree
10855 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10856 {
10857 if (!validate_arg (s1, POINTER_TYPE)
10858 || !validate_arg (s2, INTEGER_TYPE))
10859 return NULL_TREE;
10860 else
10861 {
10862 const char *p1;
10863
10864 if (TREE_CODE (s2) != INTEGER_CST)
10865 return NULL_TREE;
10866
10867 p1 = c_getstr (s1);
10868 if (p1 != NULL)
10869 {
10870 char c;
10871 const char *r;
10872 tree tem;
10873
10874 if (target_char_cast (s2, &c))
10875 return NULL_TREE;
10876
10877 r = strchr (p1, c);
10878
10879 if (r == NULL)
10880 return build_int_cst (TREE_TYPE (s1), 0);
10881
10882 /* Return an offset into the constant string argument. */
10883 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10884 return fold_convert_loc (loc, type, tem);
10885 }
10886 return NULL_TREE;
10887 }
10888 }
10889
10890 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10891 the call, and TYPE is its return type.
10892
10893 Return NULL_TREE if no simplification was possible, otherwise return the
10894 simplified form of the call as a tree.
10895
10896 The simplified form may be a constant or other expression which
10897 computes the same value, but in a more efficient manner (including
10898 calls to other builtin functions).
10899
10900 The call may contain arguments which need to be evaluated, but
10901 which are not useful to determine the result of the call. In
10902 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10903 COMPOUND_EXPR will be an argument which must be evaluated.
10904 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10905 COMPOUND_EXPR in the chain will contain the tree for the simplified
10906 form of the builtin function call. */
10907
10908 static tree
10909 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10910 {
10911 if (!validate_arg (s1, POINTER_TYPE)
10912 || !validate_arg (s2, INTEGER_TYPE))
10913 return NULL_TREE;
10914 else
10915 {
10916 tree fn;
10917 const char *p1;
10918
10919 if (TREE_CODE (s2) != INTEGER_CST)
10920 return NULL_TREE;
10921
10922 p1 = c_getstr (s1);
10923 if (p1 != NULL)
10924 {
10925 char c;
10926 const char *r;
10927 tree tem;
10928
10929 if (target_char_cast (s2, &c))
10930 return NULL_TREE;
10931
10932 r = strrchr (p1, c);
10933
10934 if (r == NULL)
10935 return build_int_cst (TREE_TYPE (s1), 0);
10936
10937 /* Return an offset into the constant string argument. */
10938 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10939 return fold_convert_loc (loc, type, tem);
10940 }
10941
10942 if (! integer_zerop (s2))
10943 return NULL_TREE;
10944
10945 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10946 if (!fn)
10947 return NULL_TREE;
10948
10949 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10950 return build_call_expr_loc (loc, fn, 2, s1, s2);
10951 }
10952 }
10953
10954 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10955 to the call, and TYPE is its return type.
10956
10957 Return NULL_TREE if no simplification was possible, otherwise return the
10958 simplified form of the call as a tree.
10959
10960 The simplified form may be a constant or other expression which
10961 computes the same value, but in a more efficient manner (including
10962 calls to other builtin functions).
10963
10964 The call may contain arguments which need to be evaluated, but
10965 which are not useful to determine the result of the call. In
10966 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10967 COMPOUND_EXPR will be an argument which must be evaluated.
10968 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10969 COMPOUND_EXPR in the chain will contain the tree for the simplified
10970 form of the builtin function call. */
10971
10972 static tree
10973 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10974 {
10975 if (!validate_arg (s1, POINTER_TYPE)
10976 || !validate_arg (s2, POINTER_TYPE))
10977 return NULL_TREE;
10978 else
10979 {
10980 tree fn;
10981 const char *p1, *p2;
10982
10983 p2 = c_getstr (s2);
10984 if (p2 == NULL)
10985 return NULL_TREE;
10986
10987 p1 = c_getstr (s1);
10988 if (p1 != NULL)
10989 {
10990 const char *r = strpbrk (p1, p2);
10991 tree tem;
10992
10993 if (r == NULL)
10994 return build_int_cst (TREE_TYPE (s1), 0);
10995
10996 /* Return an offset into the constant string argument. */
10997 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10998 return fold_convert_loc (loc, type, tem);
10999 }
11000
11001 if (p2[0] == '\0')
11002 /* strpbrk(x, "") == NULL.
11003 Evaluate and ignore s1 in case it had side-effects. */
11004 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11005
11006 if (p2[1] != '\0')
11007 return NULL_TREE; /* Really call strpbrk. */
11008
11009 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11010 if (!fn)
11011 return NULL_TREE;
11012
11013 /* New argument list transforming strpbrk(s1, s2) to
11014 strchr(s1, s2[0]). */
11015 return build_call_expr_loc (loc, fn, 2, s1,
11016 build_int_cst (integer_type_node, p2[0]));
11017 }
11018 }
11019
11020 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11021 to the call.
11022
11023 Return NULL_TREE if no simplification was possible, otherwise return the
11024 simplified form of the call as a tree.
11025
11026 The simplified form may be a constant or other expression which
11027 computes the same value, but in a more efficient manner (including
11028 calls to other builtin functions).
11029
11030 The call may contain arguments which need to be evaluated, but
11031 which are not useful to determine the result of the call. In
11032 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11033 COMPOUND_EXPR will be an argument which must be evaluated.
11034 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11035 COMPOUND_EXPR in the chain will contain the tree for the simplified
11036 form of the builtin function call. */
11037
11038 static tree
11039 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11040 {
11041 if (!validate_arg (dst, POINTER_TYPE)
11042 || !validate_arg (src, POINTER_TYPE))
11043 return NULL_TREE;
11044 else
11045 {
11046 const char *p = c_getstr (src);
11047
11048 /* If the string length is zero, return the dst parameter. */
11049 if (p && *p == '\0')
11050 return dst;
11051
11052 if (optimize_insn_for_speed_p ())
11053 {
11054 /* See if we can store by pieces into (dst + strlen(dst)). */
11055 tree newdst, call;
11056 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11057 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11058
11059 if (!strlen_fn || !strcpy_fn)
11060 return NULL_TREE;
11061
11062 /* If we don't have a movstr we don't want to emit an strcpy
11063 call. We have to do that if the length of the source string
11064 isn't computable (in that case we can use memcpy probably
11065 later expanding to a sequence of mov instructions). If we
11066 have movstr instructions we can emit strcpy calls. */
11067 if (!HAVE_movstr)
11068 {
11069 tree len = c_strlen (src, 1);
11070 if (! len || TREE_SIDE_EFFECTS (len))
11071 return NULL_TREE;
11072 }
11073
11074 /* Stabilize the argument list. */
11075 dst = builtin_save_expr (dst);
11076
11077 /* Create strlen (dst). */
11078 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11079 /* Create (dst p+ strlen (dst)). */
11080
11081 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11082 newdst = builtin_save_expr (newdst);
11083
11084 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11085 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11086 }
11087 return NULL_TREE;
11088 }
11089 }
11090
11091 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11092 arguments to the call.
11093
11094 Return NULL_TREE if no simplification was possible, otherwise return the
11095 simplified form of the call as a tree.
11096
11097 The simplified form may be a constant or other expression which
11098 computes the same value, but in a more efficient manner (including
11099 calls to other builtin functions).
11100
11101 The call may contain arguments which need to be evaluated, but
11102 which are not useful to determine the result of the call. In
11103 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11104 COMPOUND_EXPR will be an argument which must be evaluated.
11105 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11106 COMPOUND_EXPR in the chain will contain the tree for the simplified
11107 form of the builtin function call. */
11108
11109 static tree
11110 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11111 {
11112 if (!validate_arg (dst, POINTER_TYPE)
11113 || !validate_arg (src, POINTER_TYPE)
11114 || !validate_arg (len, INTEGER_TYPE))
11115 return NULL_TREE;
11116 else
11117 {
11118 const char *p = c_getstr (src);
11119
11120 /* If the requested length is zero, or the src parameter string
11121 length is zero, return the dst parameter. */
11122 if (integer_zerop (len) || (p && *p == '\0'))
11123 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11124
11125 /* If the requested len is greater than or equal to the string
11126 length, call strcat. */
11127 if (TREE_CODE (len) == INTEGER_CST && p
11128 && compare_tree_int (len, strlen (p)) >= 0)
11129 {
11130 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11131
11132 /* If the replacement _DECL isn't initialized, don't do the
11133 transformation. */
11134 if (!fn)
11135 return NULL_TREE;
11136
11137 return build_call_expr_loc (loc, fn, 2, dst, src);
11138 }
11139 return NULL_TREE;
11140 }
11141 }
11142
11143 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11144 to the call.
11145
11146 Return NULL_TREE if no simplification was possible, otherwise return the
11147 simplified form of the call as a tree.
11148
11149 The simplified form may be a constant or other expression which
11150 computes the same value, but in a more efficient manner (including
11151 calls to other builtin functions).
11152
11153 The call may contain arguments which need to be evaluated, but
11154 which are not useful to determine the result of the call. In
11155 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11156 COMPOUND_EXPR will be an argument which must be evaluated.
11157 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11158 COMPOUND_EXPR in the chain will contain the tree for the simplified
11159 form of the builtin function call. */
11160
11161 static tree
11162 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11163 {
11164 if (!validate_arg (s1, POINTER_TYPE)
11165 || !validate_arg (s2, POINTER_TYPE))
11166 return NULL_TREE;
11167 else
11168 {
11169 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11170
11171 /* If both arguments are constants, evaluate at compile-time. */
11172 if (p1 && p2)
11173 {
11174 const size_t r = strspn (p1, p2);
11175 return size_int (r);
11176 }
11177
11178 /* If either argument is "", return NULL_TREE. */
11179 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11180 /* Evaluate and ignore both arguments in case either one has
11181 side-effects. */
11182 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11183 s1, s2);
11184 return NULL_TREE;
11185 }
11186 }
11187
11188 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11189 to the call.
11190
11191 Return NULL_TREE if no simplification was possible, otherwise return the
11192 simplified form of the call as a tree.
11193
11194 The simplified form may be a constant or other expression which
11195 computes the same value, but in a more efficient manner (including
11196 calls to other builtin functions).
11197
11198 The call may contain arguments which need to be evaluated, but
11199 which are not useful to determine the result of the call. In
11200 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11201 COMPOUND_EXPR will be an argument which must be evaluated.
11202 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11203 COMPOUND_EXPR in the chain will contain the tree for the simplified
11204 form of the builtin function call. */
11205
11206 static tree
11207 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11208 {
11209 if (!validate_arg (s1, POINTER_TYPE)
11210 || !validate_arg (s2, POINTER_TYPE))
11211 return NULL_TREE;
11212 else
11213 {
11214 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11215
11216 /* If both arguments are constants, evaluate at compile-time. */
11217 if (p1 && p2)
11218 {
11219 const size_t r = strcspn (p1, p2);
11220 return size_int (r);
11221 }
11222
11223 /* If the first argument is "", return NULL_TREE. */
11224 if (p1 && *p1 == '\0')
11225 {
11226 /* Evaluate and ignore argument s2 in case it has
11227 side-effects. */
11228 return omit_one_operand_loc (loc, size_type_node,
11229 size_zero_node, s2);
11230 }
11231
11232 /* If the second argument is "", return __builtin_strlen(s1). */
11233 if (p2 && *p2 == '\0')
11234 {
11235 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11236
11237 /* If the replacement _DECL isn't initialized, don't do the
11238 transformation. */
11239 if (!fn)
11240 return NULL_TREE;
11241
11242 return build_call_expr_loc (loc, fn, 1, s1);
11243 }
11244 return NULL_TREE;
11245 }
11246 }
11247
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11254
11255 tree
11256 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11257 bool ignore, bool unlocked, tree len)
11258 {
11259 /* If we're using an unlocked function, assume the other unlocked
11260 functions exist explicitly. */
11261 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11262 : implicit_built_in_decls[BUILT_IN_FPUTC];
11263 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11264 : implicit_built_in_decls[BUILT_IN_FWRITE];
11265
11266 /* If the return value is used, don't do the transformation. */
11267 if (!ignore)
11268 return NULL_TREE;
11269
11270 /* Verify the arguments in the original call. */
11271 if (!validate_arg (arg0, POINTER_TYPE)
11272 || !validate_arg (arg1, POINTER_TYPE))
11273 return NULL_TREE;
11274
11275 if (! len)
11276 len = c_strlen (arg0, 0);
11277
11278 /* Get the length of the string passed to fputs. If the length
11279 can't be determined, punt. */
11280 if (!len
11281 || TREE_CODE (len) != INTEGER_CST)
11282 return NULL_TREE;
11283
11284 switch (compare_tree_int (len, 1))
11285 {
11286 case -1: /* length is 0, delete the call entirely . */
11287 return omit_one_operand_loc (loc, integer_type_node,
11288 integer_zero_node, arg1);;
11289
11290 case 0: /* length is 1, call fputc. */
11291 {
11292 const char *p = c_getstr (arg0);
11293
11294 if (p != NULL)
11295 {
11296 if (fn_fputc)
11297 return build_call_expr_loc (loc, fn_fputc, 2,
11298 build_int_cst
11299 (integer_type_node, p[0]), arg1);
11300 else
11301 return NULL_TREE;
11302 }
11303 }
11304 /* FALLTHROUGH */
11305 case 1: /* length is greater than 1, call fwrite. */
11306 {
11307 /* If optimizing for size keep fputs. */
11308 if (optimize_function_for_size_p (cfun))
11309 return NULL_TREE;
11310 /* New argument list transforming fputs(string, stream) to
11311 fwrite(string, 1, len, stream). */
11312 if (fn_fwrite)
11313 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11314 size_one_node, len, arg1);
11315 else
11316 return NULL_TREE;
11317 }
11318 default:
11319 gcc_unreachable ();
11320 }
11321 return NULL_TREE;
11322 }
11323
11324 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11325 produced. False otherwise. This is done so that we don't output the error
11326 or warning twice or three times. */
11327
11328 bool
11329 fold_builtin_next_arg (tree exp, bool va_start_p)
11330 {
11331 tree fntype = TREE_TYPE (current_function_decl);
11332 int nargs = call_expr_nargs (exp);
11333 tree arg;
11334
11335 if (!stdarg_p (fntype))
11336 {
11337 error ("%<va_start%> used in function with fixed args");
11338 return true;
11339 }
11340
11341 if (va_start_p)
11342 {
11343 if (va_start_p && (nargs != 2))
11344 {
11345 error ("wrong number of arguments to function %<va_start%>");
11346 return true;
11347 }
11348 arg = CALL_EXPR_ARG (exp, 1);
11349 }
11350 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11351 when we checked the arguments and if needed issued a warning. */
11352 else
11353 {
11354 if (nargs == 0)
11355 {
11356 /* Evidently an out of date version of <stdarg.h>; can't validate
11357 va_start's second argument, but can still work as intended. */
11358 warning (0, "%<__builtin_next_arg%> called without an argument");
11359 return true;
11360 }
11361 else if (nargs > 1)
11362 {
11363 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11364 return true;
11365 }
11366 arg = CALL_EXPR_ARG (exp, 0);
11367 }
11368
11369 if (TREE_CODE (arg) == SSA_NAME)
11370 arg = SSA_NAME_VAR (arg);
11371
11372 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11373 or __builtin_next_arg (0) the first time we see it, after checking
11374 the arguments and if needed issuing a warning. */
11375 if (!integer_zerop (arg))
11376 {
11377 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11378
11379 /* Strip off all nops for the sake of the comparison. This
11380 is not quite the same as STRIP_NOPS. It does more.
11381 We must also strip off INDIRECT_EXPR for C++ reference
11382 parameters. */
11383 while (CONVERT_EXPR_P (arg)
11384 || TREE_CODE (arg) == INDIRECT_REF)
11385 arg = TREE_OPERAND (arg, 0);
11386 if (arg != last_parm)
11387 {
11388 /* FIXME: Sometimes with the tree optimizers we can get the
11389 not the last argument even though the user used the last
11390 argument. We just warn and set the arg to be the last
11391 argument so that we will get wrong-code because of
11392 it. */
11393 warning (0, "second parameter of %<va_start%> not last named argument");
11394 }
11395
11396 /* Undefined by C99 7.15.1.4p4 (va_start):
11397 "If the parameter parmN is declared with the register storage
11398 class, with a function or array type, or with a type that is
11399 not compatible with the type that results after application of
11400 the default argument promotions, the behavior is undefined."
11401 */
11402 else if (DECL_REGISTER (arg))
11403 warning (0, "undefined behaviour when second parameter of "
11404 "%<va_start%> is declared with %<register%> storage");
11405
11406 /* We want to verify the second parameter just once before the tree
11407 optimizers are run and then avoid keeping it in the tree,
11408 as otherwise we could warn even for correct code like:
11409 void foo (int i, ...)
11410 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11411 if (va_start_p)
11412 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11413 else
11414 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11415 }
11416 return false;
11417 }
11418
11419
11420 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11421 ORIG may be null if this is a 2-argument call. We don't attempt to
11422 simplify calls with more than 3 arguments.
11423
11424 Return NULL_TREE if no simplification was possible, otherwise return the
11425 simplified form of the call as a tree. If IGNORED is true, it means that
11426 the caller does not use the returned value of the function. */
11427
static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  /* CALL is the replacement strcpy call (if any); RETVAL is the value
     the original sprintf would have returned, or NULL_TREE when the
     caller ignores the result.  */
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* target_percent etc. below are target-charset characters; bail out
     if they cannot be initialized.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).
	 The return value (strlen of ORIG) is only needed when the
	 caller uses it, and only a constant length is acceptable.  */
      if (!ignored)
	{
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* sprintf returns the number of characters written; splice the
	 known length in as the value of the whole expression, converted
	 to sprintf's declared return type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11507
11508 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11509 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11510 attempt to simplify calls with more than 4 arguments.
11511
11512 Return NULL_TREE if no simplification was possible, otherwise return the
11513 simplified form of the call as a tree. If IGNORED is true, it means that
11514 the caller does not use the returned value of the function. */
11515
static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  /* CALL is the replacement strcpy call (if any); RETVAL is the value
     the original snprintf would have returned, or NULL_TREE when the
     caller ignores the result.  */
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* The destination size must be a known host integer constant, or we
     cannot prove the copy fits.  */
  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* target_percent etc. below are target-charset characters; bail out
     if they cannot be initialized.  */
  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* The source length must be a known constant so we can compare
	 it against the destination size.  */
      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
	return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      /* snprintf returns the would-be output length; splice the known
	 length in as the value of the whole expression, converted to
	 snprintf's declared return type.  */
      tree fn = built_in_decls[BUILT_IN_SNPRINTF];
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11627
11628 /* Expand a call EXP to __builtin_object_size. */
11629
11630 rtx
11631 expand_builtin_object_size (tree exp)
11632 {
11633 tree ost;
11634 int object_size_type;
11635 tree fndecl = get_callee_fndecl (exp);
11636
11637 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11638 {
11639 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11640 exp, fndecl);
11641 expand_builtin_trap ();
11642 return const0_rtx;
11643 }
11644
11645 ost = CALL_EXPR_ARG (exp, 1);
11646 STRIP_NOPS (ost);
11647
11648 if (TREE_CODE (ost) != INTEGER_CST
11649 || tree_int_cst_sgn (ost) < 0
11650 || compare_tree_int (ost, 3) > 0)
11651 {
11652 error ("%Klast argument of %D is not integer constant between 0 and 3",
11653 exp, fndecl);
11654 expand_builtin_trap ();
11655 return const0_rtx;
11656 }
11657
11658 object_size_type = tree_low_cst (ost, 0);
11659
11660 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11661 }
11662
11663 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11664 FCODE is the BUILT_IN_* to use.
11665 Return NULL_RTX if we failed; the caller should emit a normal call,
11666 otherwise try to get the result in TARGET, if convenient (and in
11667 mode MODE if that's convenient). */
11668
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The _chk variants take (dest, src-or-fill-char, len, objsize).  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* SIZE of all-ones means "unknown".  Otherwise a constant LEN
	 larger than SIZE is a guaranteed overflow: warn and punt to
	 the library call, which will fail the check at run time.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* The check cannot fail here, so expand as the unchecked
	 variant, preserving the original call's tail-call flag.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11782
11783 /* Emit warning if a buffer overflow is detected at compile time. */
11784
11785 static void
11786 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11787 {
11788 int is_strlen = 0;
11789 tree len, size;
11790 location_t loc = tree_nonartificial_location (exp);
11791
11792 switch (fcode)
11793 {
11794 case BUILT_IN_STRCPY_CHK:
11795 case BUILT_IN_STPCPY_CHK:
11796 /* For __strcat_chk the warning will be emitted only if overflowing
11797 by at least strlen (dest) + 1 bytes. */
11798 case BUILT_IN_STRCAT_CHK:
11799 len = CALL_EXPR_ARG (exp, 1);
11800 size = CALL_EXPR_ARG (exp, 2);
11801 is_strlen = 1;
11802 break;
11803 case BUILT_IN_STRNCAT_CHK:
11804 case BUILT_IN_STRNCPY_CHK:
11805 len = CALL_EXPR_ARG (exp, 2);
11806 size = CALL_EXPR_ARG (exp, 3);
11807 break;
11808 case BUILT_IN_SNPRINTF_CHK:
11809 case BUILT_IN_VSNPRINTF_CHK:
11810 len = CALL_EXPR_ARG (exp, 1);
11811 size = CALL_EXPR_ARG (exp, 3);
11812 break;
11813 default:
11814 gcc_unreachable ();
11815 }
11816
11817 if (!len || !size)
11818 return;
11819
11820 if (! host_integerp (size, 1) || integer_all_onesp (size))
11821 return;
11822
11823 if (is_strlen)
11824 {
11825 len = c_strlen (len, 1);
11826 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11827 return;
11828 }
11829 else if (fcode == BUILT_IN_STRNCAT_CHK)
11830 {
11831 tree src = CALL_EXPR_ARG (exp, 1);
11832 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11833 return;
11834 src = c_strlen (src, 1);
11835 if (! src || ! host_integerp (src, 1))
11836 {
11837 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11838 exp, get_callee_fndecl (exp));
11839 return;
11840 }
11841 else if (tree_int_cst_lt (src, size))
11842 return;
11843 }
11844 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11845 return;
11846
11847 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11848 exp, get_callee_fndecl (exp));
11849 }
11850
11851 /* Emit warning if a buffer overflow is detected at compile time
11852 in __sprintf_chk/__vsprintf_chk calls. */
11853
11854 static void
11855 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11856 {
11857 tree size, len, fmt;
11858 const char *fmt_str;
11859 int nargs = call_expr_nargs (exp);
11860
11861 /* Verify the required arguments in the original call. */
11862
11863 if (nargs < 4)
11864 return;
11865 size = CALL_EXPR_ARG (exp, 2);
11866 fmt = CALL_EXPR_ARG (exp, 3);
11867
11868 if (! host_integerp (size, 1) || integer_all_onesp (size))
11869 return;
11870
11871 /* Check whether the format is a literal string constant. */
11872 fmt_str = c_getstr (fmt);
11873 if (fmt_str == NULL)
11874 return;
11875
11876 if (!init_target_chars ())
11877 return;
11878
11879 /* If the format doesn't contain % args or %%, we know its size. */
11880 if (strchr (fmt_str, target_percent) == 0)
11881 len = build_int_cstu (size_type_node, strlen (fmt_str));
11882 /* If the format is "%s" and first ... argument is a string literal,
11883 we know it too. */
11884 else if (fcode == BUILT_IN_SPRINTF_CHK
11885 && strcmp (fmt_str, target_percent_s) == 0)
11886 {
11887 tree arg;
11888
11889 if (nargs < 5)
11890 return;
11891 arg = CALL_EXPR_ARG (exp, 4);
11892 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11893 return;
11894
11895 len = c_strlen (arg, 1);
11896 if (!len || ! host_integerp (len, 1))
11897 return;
11898 }
11899 else
11900 return;
11901
11902 if (! tree_int_cst_lt (len, size))
11903 warning_at (tree_nonartificial_location (exp),
11904 0, "%Kcall to %D will always overflow destination buffer",
11905 exp, get_callee_fndecl (exp));
11906 }
11907
11908 /* Emit warning if a free is called with address of a variable. */
11909
11910 static void
11911 maybe_emit_free_warning (tree exp)
11912 {
11913 tree arg = CALL_EXPR_ARG (exp, 0);
11914
11915 STRIP_NOPS (arg);
11916 if (TREE_CODE (arg) != ADDR_EXPR)
11917 return;
11918
11919 arg = get_base_address (TREE_OPERAND (arg, 0));
11920 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11921 return;
11922
11923 if (SSA_VAR_P (arg))
11924 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11925 "%Kattempt to free a non-heap object %qD", exp, arg);
11926 else
11927 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11928 "%Kattempt to free a non-heap object", exp);
11929 }
11930
11931 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11932 if possible. */
11933
11934 tree
11935 fold_builtin_object_size (tree ptr, tree ost)
11936 {
11937 unsigned HOST_WIDE_INT bytes;
11938 int object_size_type;
11939
11940 if (!validate_arg (ptr, POINTER_TYPE)
11941 || !validate_arg (ost, INTEGER_TYPE))
11942 return NULL_TREE;
11943
11944 STRIP_NOPS (ost);
11945
11946 if (TREE_CODE (ost) != INTEGER_CST
11947 || tree_int_cst_sgn (ost) < 0
11948 || compare_tree_int (ost, 3) > 0)
11949 return NULL_TREE;
11950
11951 object_size_type = tree_low_cst (ost, 0);
11952
11953 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11954 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11955 and (size_t) 0 for types 2 and 3. */
11956 if (TREE_SIDE_EFFECTS (ptr))
11957 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11958
11959 if (TREE_CODE (ptr) == ADDR_EXPR)
11960 {
11961 bytes = compute_builtin_object_size (ptr, object_size_type);
11962 if (double_int_fits_to_tree_p (size_type_node,
11963 uhwi_to_double_int (bytes)))
11964 return build_int_cstu (size_type_node, bytes);
11965 }
11966 else if (TREE_CODE (ptr) == SSA_NAME)
11967 {
11968 /* If object size is not known yet, delay folding until
11969 later. Maybe subsequent passes will help determining
11970 it. */
11971 bytes = compute_builtin_object_size (ptr, object_size_type);
11972 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11973 && double_int_fits_to_tree_p (size_type_node,
11974 uhwi_to_double_int (bytes)))
11975 return build_int_cstu (size_type_node, bytes);
11976 }
11977
11978 return NULL_TREE;
11979 }
11980
11981 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11982 DEST, SRC, LEN, and SIZE are the arguments to the call.
11983 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11984 code of the builtin. If MAXLEN is not NULL, it is maximum length
11985 passed as third argument. */
11986
11987 tree
11988 fold_builtin_memory_chk (location_t loc, tree fndecl,
11989 tree dest, tree src, tree len, tree size,
11990 tree maxlen, bool ignore,
11991 enum built_in_function fcode)
11992 {
11993 tree fn;
11994
11995 if (!validate_arg (dest, POINTER_TYPE)
11996 || !validate_arg (src,
11997 (fcode == BUILT_IN_MEMSET_CHK
11998 ? INTEGER_TYPE : POINTER_TYPE))
11999 || !validate_arg (len, INTEGER_TYPE)
12000 || !validate_arg (size, INTEGER_TYPE))
12001 return NULL_TREE;
12002
12003 /* If SRC and DEST are the same (and not volatile), return DEST
12004 (resp. DEST+LEN for __mempcpy_chk). */
12005 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12006 {
12007 if (fcode != BUILT_IN_MEMPCPY_CHK)
12008 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12009 dest, len);
12010 else
12011 {
12012 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12013 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12014 }
12015 }
12016
12017 if (! host_integerp (size, 1))
12018 return NULL_TREE;
12019
12020 if (! integer_all_onesp (size))
12021 {
12022 if (! host_integerp (len, 1))
12023 {
12024 /* If LEN is not constant, try MAXLEN too.
12025 For MAXLEN only allow optimizing into non-_ocs function
12026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12027 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12028 {
12029 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12030 {
12031 /* (void) __mempcpy_chk () can be optimized into
12032 (void) __memcpy_chk (). */
12033 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12034 if (!fn)
12035 return NULL_TREE;
12036
12037 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12038 }
12039 return NULL_TREE;
12040 }
12041 }
12042 else
12043 maxlen = len;
12044
12045 if (tree_int_cst_lt (size, maxlen))
12046 return NULL_TREE;
12047 }
12048
12049 fn = NULL_TREE;
12050 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12051 mem{cpy,pcpy,move,set} is available. */
12052 switch (fcode)
12053 {
12054 case BUILT_IN_MEMCPY_CHK:
12055 fn = built_in_decls[BUILT_IN_MEMCPY];
12056 break;
12057 case BUILT_IN_MEMPCPY_CHK:
12058 fn = built_in_decls[BUILT_IN_MEMPCPY];
12059 break;
12060 case BUILT_IN_MEMMOVE_CHK:
12061 fn = built_in_decls[BUILT_IN_MEMMOVE];
12062 break;
12063 case BUILT_IN_MEMSET_CHK:
12064 fn = built_in_decls[BUILT_IN_MEMSET];
12065 break;
12066 default:
12067 break;
12068 }
12069
12070 if (!fn)
12071 return NULL_TREE;
12072
12073 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12074 }
12075
12076 /* Fold a call to the __st[rp]cpy_chk builtin.
12077 DEST, SRC, and SIZE are the arguments to the call.
12078 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12079 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12080 strings passed as second argument. */
12081
12082 tree
12083 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12084 tree src, tree size,
12085 tree maxlen, bool ignore,
12086 enum built_in_function fcode)
12087 {
12088 tree len, fn;
12089
12090 if (!validate_arg (dest, POINTER_TYPE)
12091 || !validate_arg (src, POINTER_TYPE)
12092 || !validate_arg (size, INTEGER_TYPE))
12093 return NULL_TREE;
12094
12095 /* If SRC and DEST are the same (and not volatile), return DEST. */
12096 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12097 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12098
12099 if (! host_integerp (size, 1))
12100 return NULL_TREE;
12101
12102 if (! integer_all_onesp (size))
12103 {
12104 len = c_strlen (src, 1);
12105 if (! len || ! host_integerp (len, 1))
12106 {
12107 /* If LEN is not constant, try MAXLEN too.
12108 For MAXLEN only allow optimizing into non-_ocs function
12109 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12110 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12111 {
12112 if (fcode == BUILT_IN_STPCPY_CHK)
12113 {
12114 if (! ignore)
12115 return NULL_TREE;
12116
12117 /* If return value of __stpcpy_chk is ignored,
12118 optimize into __strcpy_chk. */
12119 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12120 if (!fn)
12121 return NULL_TREE;
12122
12123 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12124 }
12125
12126 if (! len || TREE_SIDE_EFFECTS (len))
12127 return NULL_TREE;
12128
12129 /* If c_strlen returned something, but not a constant,
12130 transform __strcpy_chk into __memcpy_chk. */
12131 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12132 if (!fn)
12133 return NULL_TREE;
12134
12135 len = fold_convert_loc (loc, size_type_node, len);
12136 len = size_binop_loc (loc, PLUS_EXPR, len,
12137 build_int_cst (size_type_node, 1));
12138 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12139 build_call_expr_loc (loc, fn, 4,
12140 dest, src, len, size));
12141 }
12142 }
12143 else
12144 maxlen = len;
12145
12146 if (! tree_int_cst_lt (maxlen, size))
12147 return NULL_TREE;
12148 }
12149
12150 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12151 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12152 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12153 if (!fn)
12154 return NULL_TREE;
12155
12156 return build_call_expr_loc (loc, fn, 2, dest, src);
12157 }
12158
12159 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12160 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12161 length passed as third argument. */
12162
12163 tree
12164 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12165 tree len, tree size, tree maxlen)
12166 {
12167 tree fn;
12168
12169 if (!validate_arg (dest, POINTER_TYPE)
12170 || !validate_arg (src, POINTER_TYPE)
12171 || !validate_arg (len, INTEGER_TYPE)
12172 || !validate_arg (size, INTEGER_TYPE))
12173 return NULL_TREE;
12174
12175 if (! host_integerp (size, 1))
12176 return NULL_TREE;
12177
12178 if (! integer_all_onesp (size))
12179 {
12180 if (! host_integerp (len, 1))
12181 {
12182 /* If LEN is not constant, try MAXLEN too.
12183 For MAXLEN only allow optimizing into non-_ocs function
12184 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12185 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12186 return NULL_TREE;
12187 }
12188 else
12189 maxlen = len;
12190
12191 if (tree_int_cst_lt (size, maxlen))
12192 return NULL_TREE;
12193 }
12194
12195 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12196 fn = built_in_decls[BUILT_IN_STRNCPY];
12197 if (!fn)
12198 return NULL_TREE;
12199
12200 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12201 }
12202
12203 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12204 are the arguments to the call. */
12205
12206 static tree
12207 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12208 tree src, tree size)
12209 {
12210 tree fn;
12211 const char *p;
12212
12213 if (!validate_arg (dest, POINTER_TYPE)
12214 || !validate_arg (src, POINTER_TYPE)
12215 || !validate_arg (size, INTEGER_TYPE))
12216 return NULL_TREE;
12217
12218 p = c_getstr (src);
12219 /* If the SRC parameter is "", return DEST. */
12220 if (p && *p == '\0')
12221 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12222
12223 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12224 return NULL_TREE;
12225
12226 /* If __builtin_strcat_chk is used, assume strcat is available. */
12227 fn = built_in_decls[BUILT_IN_STRCAT];
12228 if (!fn)
12229 return NULL_TREE;
12230
12231 return build_call_expr_loc (loc, fn, 2, dest, src);
12232 }
12233
12234 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12235 LEN, and SIZE. */
12236
12237 static tree
12238 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12239 tree dest, tree src, tree len, tree size)
12240 {
12241 tree fn;
12242 const char *p;
12243
12244 if (!validate_arg (dest, POINTER_TYPE)
12245 || !validate_arg (src, POINTER_TYPE)
12246 || !validate_arg (size, INTEGER_TYPE)
12247 || !validate_arg (size, INTEGER_TYPE))
12248 return NULL_TREE;
12249
12250 p = c_getstr (src);
12251 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12252 if (p && *p == '\0')
12253 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12254 else if (integer_zerop (len))
12255 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12256
12257 if (! host_integerp (size, 1))
12258 return NULL_TREE;
12259
12260 if (! integer_all_onesp (size))
12261 {
12262 tree src_len = c_strlen (src, 1);
12263 if (src_len
12264 && host_integerp (src_len, 1)
12265 && host_integerp (len, 1)
12266 && ! tree_int_cst_lt (len, src_len))
12267 {
12268 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12269 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12270 if (!fn)
12271 return NULL_TREE;
12272
12273 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12274 }
12275 return NULL_TREE;
12276 }
12277
12278 /* If __builtin_strncat_chk is used, assume strncat is available. */
12279 fn = built_in_decls[BUILT_IN_STRNCAT];
12280 if (!fn)
12281 return NULL_TREE;
12282
12283 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12284 }
12285
12286 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12287 Return NULL_TREE if a normal call should be emitted rather than
12288 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12289 or BUILT_IN_VSPRINTF_CHK. */
12290
12291 static tree
12292 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12293 enum built_in_function fcode)
12294 {
12295 tree dest, size, len, fn, fmt, flag;
12296 const char *fmt_str;
12297
12298 /* Verify the required arguments in the original call. */
12299 if (nargs < 4)
12300 return NULL_TREE;
12301 dest = args[0];
12302 if (!validate_arg (dest, POINTER_TYPE))
12303 return NULL_TREE;
12304 flag = args[1];
12305 if (!validate_arg (flag, INTEGER_TYPE))
12306 return NULL_TREE;
12307 size = args[2];
12308 if (!validate_arg (size, INTEGER_TYPE))
12309 return NULL_TREE;
12310 fmt = args[3];
12311 if (!validate_arg (fmt, POINTER_TYPE))
12312 return NULL_TREE;
12313
12314 if (! host_integerp (size, 1))
12315 return NULL_TREE;
12316
12317 len = NULL_TREE;
12318
12319 if (!init_target_chars ())
12320 return NULL_TREE;
12321
12322 /* Check whether the format is a literal string constant. */
12323 fmt_str = c_getstr (fmt);
12324 if (fmt_str != NULL)
12325 {
12326 /* If the format doesn't contain % args or %%, we know the size. */
12327 if (strchr (fmt_str, target_percent) == 0)
12328 {
12329 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12330 len = build_int_cstu (size_type_node, strlen (fmt_str));
12331 }
12332 /* If the format is "%s" and first ... argument is a string literal,
12333 we know the size too. */
12334 else if (fcode == BUILT_IN_SPRINTF_CHK
12335 && strcmp (fmt_str, target_percent_s) == 0)
12336 {
12337 tree arg;
12338
12339 if (nargs == 5)
12340 {
12341 arg = args[4];
12342 if (validate_arg (arg, POINTER_TYPE))
12343 {
12344 len = c_strlen (arg, 1);
12345 if (! len || ! host_integerp (len, 1))
12346 len = NULL_TREE;
12347 }
12348 }
12349 }
12350 }
12351
12352 if (! integer_all_onesp (size))
12353 {
12354 if (! len || ! tree_int_cst_lt (len, size))
12355 return NULL_TREE;
12356 }
12357
12358 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12359 or if format doesn't contain % chars or is "%s". */
12360 if (! integer_zerop (flag))
12361 {
12362 if (fmt_str == NULL)
12363 return NULL_TREE;
12364 if (strchr (fmt_str, target_percent) != NULL
12365 && strcmp (fmt_str, target_percent_s))
12366 return NULL_TREE;
12367 }
12368
12369 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12370 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12371 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12372 if (!fn)
12373 return NULL_TREE;
12374
12375 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12376 }
12377
12378 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12379 a normal call should be emitted rather than expanding the function
12380 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12381
12382 static tree
12383 fold_builtin_sprintf_chk (location_t loc, tree exp,
12384 enum built_in_function fcode)
12385 {
12386 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12387 CALL_EXPR_ARGP (exp), fcode);
12388 }
12389
12390 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12391 NULL_TREE if a normal call should be emitted rather than expanding
12392 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12393 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12394 passed as second argument. */
12395
12396 static tree
12397 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12398 tree maxlen, enum built_in_function fcode)
12399 {
12400 tree dest, size, len, fn, fmt, flag;
12401 const char *fmt_str;
12402
12403 /* Verify the required arguments in the original call. */
12404 if (nargs < 5)
12405 return NULL_TREE;
12406 dest = args[0];
12407 if (!validate_arg (dest, POINTER_TYPE))
12408 return NULL_TREE;
12409 len = args[1];
12410 if (!validate_arg (len, INTEGER_TYPE))
12411 return NULL_TREE;
12412 flag = args[2];
12413 if (!validate_arg (flag, INTEGER_TYPE))
12414 return NULL_TREE;
12415 size = args[3];
12416 if (!validate_arg (size, INTEGER_TYPE))
12417 return NULL_TREE;
12418 fmt = args[4];
12419 if (!validate_arg (fmt, POINTER_TYPE))
12420 return NULL_TREE;
12421
12422 if (! host_integerp (size, 1))
12423 return NULL_TREE;
12424
12425 if (! integer_all_onesp (size))
12426 {
12427 if (! host_integerp (len, 1))
12428 {
12429 /* If LEN is not constant, try MAXLEN too.
12430 For MAXLEN only allow optimizing into non-_ocs function
12431 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12432 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12433 return NULL_TREE;
12434 }
12435 else
12436 maxlen = len;
12437
12438 if (tree_int_cst_lt (size, maxlen))
12439 return NULL_TREE;
12440 }
12441
12442 if (!init_target_chars ())
12443 return NULL_TREE;
12444
12445 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12446 or if format doesn't contain % chars or is "%s". */
12447 if (! integer_zerop (flag))
12448 {
12449 fmt_str = c_getstr (fmt);
12450 if (fmt_str == NULL)
12451 return NULL_TREE;
12452 if (strchr (fmt_str, target_percent) != NULL
12453 && strcmp (fmt_str, target_percent_s))
12454 return NULL_TREE;
12455 }
12456
12457 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12458 available. */
12459 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12460 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12461 if (!fn)
12462 return NULL_TREE;
12463
12464 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12465 }
12466
12467 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12468 a normal call should be emitted rather than expanding the function
12469 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12470 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12471 passed as second argument. */
12472
12473 tree
12474 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12475 enum built_in_function fcode)
12476 {
12477 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12478 CALL_EXPR_ARGP (exp), maxlen, fcode);
12479 }
12480
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   LOC is the call's location, FNDECL the callee declaration (used for the
   result type), and IGNORE is true when the call's return value is unused
   -- we only transform in that case, since putchar/puts do not return the
   character count printf would.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: the format is exactly "%s", or contains no % at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* The va_list variants pass the va_list as ARG, not a string,
	     so the "%s" transformation does not apply to them.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* The string to print is the "%s" argument, which must be a
	     literal for us to reason about it.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL stays NULL when the needed putchar/puts decl was unavailable;
     in that case emit the original call.  */
  if (!call)
    return NULL_TREE;

  /* putchar/puts return int; convert to the printf variant's declared
     return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12629
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   LOC is the call's location, FNDECL the callee declaration (used for the
   result type), and IGNORE is true when the call's return value is unused
   -- we only transform in that case, since fputc/fputs do not return the
   character count fprintf would.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing ARG with a %-free format is malformed for the
	 non-va_list variants; leave the call alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL stays NULL when the needed fputc/fputs decl was unavailable;
     in that case emit the original call.  */
  if (!call)
    return NULL_TREE;
  /* fputc/fputs return int; convert to the fprintf variant's declared
     return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12728
12729 /* Initialize format string characters in the target charset. */
12730
12731 static bool
12732 init_target_chars (void)
12733 {
12734 static bool init;
12735 if (!init)
12736 {
12737 target_newline = lang_hooks.to_target_charset ('\n');
12738 target_percent = lang_hooks.to_target_charset ('%');
12739 target_c = lang_hooks.to_target_charset ('c');
12740 target_s = lang_hooks.to_target_charset ('s');
12741 if (target_newline == 0 || target_percent == 0 || target_c == 0
12742 || target_s == 0)
12743 return false;
12744
12745 target_percent_c[0] = target_percent;
12746 target_percent_c[1] = target_c;
12747 target_percent_c[2] = '\0';
12748
12749 target_percent_s[0] = target_percent;
12750 target_percent_s[1] = target_s;
12751 target_percent_s[2] = '\0';
12752
12753 target_percent_s_newline[0] = target_percent;
12754 target_percent_s_newline[1] = target_s;
12755 target_percent_s_newline[2] = target_newline;
12756 target_percent_s_newline[3] = '\0';
12757
12758 init = true;
12759 }
12760 return true;
12761 }
12762
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Convert the MPFR value into GCC's internal representation.  */
      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  /* Round-trip through the mode of TYPE to detect values that
	     are representable in REAL_VALUE_TYPE but not in the mode.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
12799
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert the real and imaginary parts separately; TYPE is a
	 complex type, so its element type is TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Round-trip both parts through the element mode to detect
	     values the mode cannot represent exactly.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
12846
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Reject non-finite arguments and arguments outside the
	 requested domain; the bound comparisons are strict or not
	 depending on INCLUSIVE.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the MPFR exception flags so do_mpfr_ckconv can tell
	     whether FUNC itself overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12893
12894 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12895 FUNC on it and return the resulting value as a tree with type TYPE.
12896 The mpfr precision is set to the precision of TYPE. We assume that
12897 function FUNC returns zero if the result could be calculated
12898 exactly within the requested precision. */
12899
12900 static tree
12901 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12902 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12903 {
12904 tree result = NULL_TREE;
12905
12906 STRIP_NOPS (arg1);
12907 STRIP_NOPS (arg2);
12908
12909 /* To proceed, MPFR must exactly represent the target floating point
12910 format, which only happens when the target base equals two. */
12911 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12912 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12913 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12914 {
12915 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12916 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12917
12918 if (real_isfinite (ra1) && real_isfinite (ra2))
12919 {
12920 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12921 const int prec = fmt->p;
12922 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12923 int inexact;
12924 mpfr_t m1, m2;
12925
12926 mpfr_inits2 (prec, m1, m2, NULL);
12927 mpfr_from_real (m1, ra1, GMP_RNDN);
12928 mpfr_from_real (m2, ra2, GMP_RNDN);
12929 mpfr_clear_flags ();
12930 inexact = func (m1, m1, m2, rnd);
12931 result = do_mpfr_ckconv (m1, type, inexact);
12932 mpfr_clears (m1, m2, NULL);
12933 }
12934 }
12935
12936 return result;
12937 }
12938
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  All
     three operands must be overflow-free REAL_CSTs.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Clear the MPFR exception flags so do_mpfr_ckconv can tell
	     whether FUNC overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
12987
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sine and cosine in one mpfr call; both results
	     must convert cleanly for the fold to proceed.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
	        {
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13057
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  The
     order must fit in a host integer and the argument must be a
     REAL_CST without overflow.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* N == (long)N guards against truncation when HOST_WIDE_INT is
	 wider than long, since FUNC takes the order as a long.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the MPFR exception flags so do_mpfr_ckconv can tell
	     whether FUNC overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13104
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13178
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the MPFR exception flags so do_mpfr_ckconv can tell
	     whether mpfr_lgamma overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13243
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
	  /* The precision and rounding modes come from the element
	     type of the complex result type.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  /* Clear the MPFR exception flags so do_mpc_ckconv can tell
	     whether FUNC overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
13288
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE, Inf/NaN parts are accepted and the
	 conversion checks in do_mpc_ckconv are bypassed as well.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  /* The precision and rounding modes come from the element
	     type of the complex result type.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  /* Clear the MPFR exception flags so do_mpc_ckconv can tell
	     whether FUNC overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13347
13348 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13349 a normal call should be emitted rather than expanding the function
13350 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13351
13352 static tree
13353 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13354 {
13355 int nargs = gimple_call_num_args (stmt);
13356
13357 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13358 (nargs > 0
13359 ? gimple_call_arg_ptr (stmt, 0)
13360 : &error_mark_node), fcode);
13361 }
13362
13363 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13364 a normal call should be emitted rather than expanding the function
13365 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13366 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13367 passed as second argument. */
13368
13369 tree
13370 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13371 enum built_in_function fcode)
13372 {
13373 int nargs = gimple_call_num_args (stmt);
13374
13375 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13376 (nargs > 0
13377 ? gimple_call_arg_ptr (stmt, 0)
13378 : &error_mark_node), maxlen, fcode);
13379 }
13380
13381 /* Builtins with folding operations that operate on "..." arguments
13382 need special handling; we need to store the arguments in a convenient
13383 data structure before attempting any folding. Fortunately there are
13384 only a few builtins that fall into this category. FNDECL is the
13385 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13386 result of the function call is ignored. */
13387
13388 static tree
13389 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13390 bool ignore ATTRIBUTE_UNUSED)
13391 {
13392 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13393 tree ret = NULL_TREE;
13394
13395 switch (fcode)
13396 {
13397 case BUILT_IN_SPRINTF_CHK:
13398 case BUILT_IN_VSPRINTF_CHK:
13399 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13400 break;
13401
13402 case BUILT_IN_SNPRINTF_CHK:
13403 case BUILT_IN_VSNPRINTF_CHK:
13404 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13405
13406 default:
13407 break;
13408 }
13409 if (ret)
13410 {
13411 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13412 TREE_NO_WARNING (ret) = 1;
13413 return ret;
13414 }
13415 return NULL_TREE;
13416 }
13417
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only direct calls to builtins are foldable here; calls carrying a
     __builtin_va_arg_pack are left for later expansion.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* Point at error_mark_node when there are no arguments so the
	 folders always receive a valid pointer.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
	  /* Machine-dependent builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the warning-suppressing NOP wrapper
		     added by gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
13472
/* Look up the function in built_in_decls that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
  set_user_assembler_name (builtin, asmspec);
  /* Some builtins are also emitted via libcalls from expanders, so
     the corresponding libfuncs must be renamed to match.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* The ffs expander uses the optab libfunc only when int is
	 narrower than a word; rename it in that case too.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
13518
13519 /* Return true if DECL is a builtin that expands to a constant or similarly
13520 simple code. */
13521 bool
13522 is_simple_builtin (tree decl)
13523 {
13524 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13525 switch (DECL_FUNCTION_CODE (decl))
13526 {
13527 /* Builtins that expand to constants. */
13528 case BUILT_IN_CONSTANT_P:
13529 case BUILT_IN_EXPECT:
13530 case BUILT_IN_OBJECT_SIZE:
13531 case BUILT_IN_UNREACHABLE:
13532 /* Simple register moves or loads from stack. */
13533 case BUILT_IN_ASSUME_ALIGNED:
13534 case BUILT_IN_RETURN_ADDRESS:
13535 case BUILT_IN_EXTRACT_RETURN_ADDR:
13536 case BUILT_IN_FROB_RETURN_ADDR:
13537 case BUILT_IN_RETURN:
13538 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13539 case BUILT_IN_FRAME_ADDRESS:
13540 case BUILT_IN_VA_END:
13541 case BUILT_IN_STACK_SAVE:
13542 case BUILT_IN_STACK_RESTORE:
13543 /* Exception state returns or moves registers around. */
13544 case BUILT_IN_EH_FILTER:
13545 case BUILT_IN_EH_POINTER:
13546 case BUILT_IN_EH_COPY_VALUES:
13547 return true;
13548
13549 default:
13550 return false;
13551 }
13552
13553 return false;
13554 }
13555
13556 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13557 most probably expanded inline into reasonably simple code. This is a
13558 superset of is_simple_builtin. */
13559 bool
13560 is_inexpensive_builtin (tree decl)
13561 {
13562 if (!decl)
13563 return false;
13564 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13565 return true;
13566 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13567 switch (DECL_FUNCTION_CODE (decl))
13568 {
13569 case BUILT_IN_ABS:
13570 case BUILT_IN_ALLOCA:
13571 case BUILT_IN_BSWAP32:
13572 case BUILT_IN_BSWAP64:
13573 case BUILT_IN_CLZ:
13574 case BUILT_IN_CLZIMAX:
13575 case BUILT_IN_CLZL:
13576 case BUILT_IN_CLZLL:
13577 case BUILT_IN_CTZ:
13578 case BUILT_IN_CTZIMAX:
13579 case BUILT_IN_CTZL:
13580 case BUILT_IN_CTZLL:
13581 case BUILT_IN_FFS:
13582 case BUILT_IN_FFSIMAX:
13583 case BUILT_IN_FFSL:
13584 case BUILT_IN_FFSLL:
13585 case BUILT_IN_IMAXABS:
13586 case BUILT_IN_FINITE:
13587 case BUILT_IN_FINITEF:
13588 case BUILT_IN_FINITEL:
13589 case BUILT_IN_FINITED32:
13590 case BUILT_IN_FINITED64:
13591 case BUILT_IN_FINITED128:
13592 case BUILT_IN_FPCLASSIFY:
13593 case BUILT_IN_ISFINITE:
13594 case BUILT_IN_ISINF_SIGN:
13595 case BUILT_IN_ISINF:
13596 case BUILT_IN_ISINFF:
13597 case BUILT_IN_ISINFL:
13598 case BUILT_IN_ISINFD32:
13599 case BUILT_IN_ISINFD64:
13600 case BUILT_IN_ISINFD128:
13601 case BUILT_IN_ISNAN:
13602 case BUILT_IN_ISNANF:
13603 case BUILT_IN_ISNANL:
13604 case BUILT_IN_ISNAND32:
13605 case BUILT_IN_ISNAND64:
13606 case BUILT_IN_ISNAND128:
13607 case BUILT_IN_ISNORMAL:
13608 case BUILT_IN_ISGREATER:
13609 case BUILT_IN_ISGREATEREQUAL:
13610 case BUILT_IN_ISLESS:
13611 case BUILT_IN_ISLESSEQUAL:
13612 case BUILT_IN_ISLESSGREATER:
13613 case BUILT_IN_ISUNORDERED:
13614 case BUILT_IN_VA_ARG_PACK:
13615 case BUILT_IN_VA_ARG_PACK_LEN:
13616 case BUILT_IN_VA_COPY:
13617 case BUILT_IN_TRAP:
13618 case BUILT_IN_SAVEREGS:
13619 case BUILT_IN_POPCOUNTL:
13620 case BUILT_IN_POPCOUNTLL:
13621 case BUILT_IN_POPCOUNTIMAX:
13622 case BUILT_IN_POPCOUNT:
13623 case BUILT_IN_PARITYL:
13624 case BUILT_IN_PARITYLL:
13625 case BUILT_IN_PARITYIMAX:
13626 case BUILT_IN_PARITY:
13627 case BUILT_IN_LABS:
13628 case BUILT_IN_LLABS:
13629 case BUILT_IN_PREFETCH:
13630 return true;
13631
13632 default:
13633 return is_simple_builtin (decl);
13634 }
13635
13636 return false;
13637 }