re PR rtl-optimization/51040 (ICE: RTL check: access of elt 1 of 'not' with last...
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
53
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
64
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
75
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
227
/* Return true if NAME starts with one of the reserved built-in
   prefixes: "__builtin_", "__sync_" or "__atomic_".  */

bool
is_builtin_name (const char *name)
{
  static const struct { const char *prefix; size_t len; } reserved[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 },
    { "__atomic_", 9 }
  };
  size_t k;

  for (k = 0; k < sizeof (reserved) / sizeof (reserved[0]); k++)
    if (strncmp (name, reserved[k].prefix, reserved[k].len) == 0)
      return true;

  return false;
}
241
242
243 /* Return true if DECL is a function symbol representing a built-in,
   i.e. a FUNCTION_DECL whose DECL_BUILT_IN flag is set.  */
244
245 bool
246 is_builtin_fn (tree decl)
247 {
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
249 }
250
251
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
255
256 static bool
257 called_as_built_in (tree node)
258 {
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
261 will have. */
/* NOTE(review): assumes DECL_NAME (node) is non-NULL -- confirm that all
   callers only pass named function decls.  */
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
264 }
265
266 /* Compute values M and N such that M divides (address of EXP - N) and
267 such that N < M. Store N in *BITPOSP and return M.
268
269 Note that the address (and thus the alignment) computed here is based
270 on the address to which a symbol resolves, whereas DECL_ALIGN is based
271 on the address at which an object is actually located. These two
272 addresses are not always the same. For example, on ARM targets,
273 the address &foo of a Thumb function foo() has the lowest bit set,
274 whereas foo() itself starts on an even address. */
275
276 unsigned int
277 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
278 {
279 HOST_WIDE_INT bitsize, bitpos;
280 tree offset;
281 enum machine_mode mode;
282 int unsignedp, volatilep;
283 unsigned int align, inner;
284
285 /* Get the innermost object and the constant (bitpos) and possibly
286 variable (offset) offset of the access. */
287 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
288 &mode, &unsignedp, &volatilep, true);
289
290 /* Extract alignment information from the innermost object and
291 possibly adjust bitpos and offset. */
292 if (TREE_CODE (exp) == CONST_DECL)
293 exp = DECL_INITIAL (exp);
294 if (DECL_P (exp)
295 && TREE_CODE (exp) != LABEL_DECL)
296 {
297 if (TREE_CODE (exp) == FUNCTION_DECL)
298 {
299 /* Function addresses can encode extra information besides their
300 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
301 allows the low bit to be used as a virtual bit, we know
302 that the address itself must be 2-byte aligned. */
303 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
304 align = 2 * BITS_PER_UNIT;
305 else
306 align = BITS_PER_UNIT;
307 }
308 else
309 align = DECL_ALIGN (exp);
310 }
311 else if (CONSTANT_CLASS_P (exp))
312 {
313 align = TYPE_ALIGN (TREE_TYPE (exp));
314 #ifdef CONSTANT_ALIGNMENT
315 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
316 #endif
317 }
318 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
319 align = TYPE_ALIGN (TREE_TYPE (exp));
320 else if (TREE_CODE (exp) == INDIRECT_REF)
321 align = TYPE_ALIGN (TREE_TYPE (exp));
322 else if (TREE_CODE (exp) == MEM_REF)
323 {
324 tree addr = TREE_OPERAND (exp, 0);
325 struct ptr_info_def *pi;
/* An address of the form (ptr & -C) proves alignment: the lowest set
   bit of the constant mask bounds the guaranteed alignment.  */
326 if (TREE_CODE (addr) == BIT_AND_EXPR
327 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
328 {
329 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
330 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)))
331 align *= BITS_PER_UNIT;
332 addr = TREE_OPERAND (addr, 0);
333 }
334 else
335 align = BITS_PER_UNIT;
/* If points-to analysis recorded alignment info for the SSA pointer,
   fold its known align/misalign into our running values.  */
336 if (TREE_CODE (addr) == SSA_NAME
337 && (pi = SSA_NAME_PTR_INFO (addr)))
338 {
339 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
340 align = MAX (pi->align * BITS_PER_UNIT, align);
341 }
342 else if (TREE_CODE (addr) == ADDR_EXPR)
343 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
344 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
345 }
346 else if (TREE_CODE (exp) == TARGET_MEM_REF)
347 {
348 struct ptr_info_def *pi;
349 tree addr = TMR_BASE (exp);
350 if (TREE_CODE (addr) == BIT_AND_EXPR
351 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
352 {
353 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
354 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
355 align *= BITS_PER_UNIT;
356 addr = TREE_OPERAND (addr, 0);
357 }
358 else
359 align = BITS_PER_UNIT;
360 if (TREE_CODE (addr) == SSA_NAME
361 && (pi = SSA_NAME_PTR_INFO (addr)))
362 {
363 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
364 align = MAX (pi->align * BITS_PER_UNIT, align);
365 }
366 else if (TREE_CODE (addr) == ADDR_EXPR)
367 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
368 if (TMR_OFFSET (exp))
369 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
/* A variable index scaled by STEP limits alignment to the largest
   power of two dividing STEP; an unscaled index gives no guarantee.  */
370 if (TMR_INDEX (exp) && TMR_STEP (exp))
371 {
372 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
373 align = MIN (align, (step & -step) * BITS_PER_UNIT);
374 }
375 else if (TMR_INDEX (exp))
376 align = BITS_PER_UNIT;
377 if (TMR_INDEX2 (exp))
378 align = BITS_PER_UNIT;
379 }
380 else
381 align = BITS_PER_UNIT;
382
383 /* If there is a non-constant offset part extract the maximum
384 alignment that can prevail. */
/* INNER accumulates the largest power-of-two factor provably dividing
   the variable offset; ~0U means "no constraint yet".  */
385 inner = ~0U;
386 while (offset)
387 {
388 tree next_offset;
389
390 if (TREE_CODE (offset) == PLUS_EXPR)
391 {
392 next_offset = TREE_OPERAND (offset, 0);
393 offset = TREE_OPERAND (offset, 1);
394 }
395 else
396 next_offset = NULL;
397 if (host_integerp (offset, 1))
398 {
399 /* Any overflow in calculating offset_bits won't change
400 the alignment. */
401 unsigned offset_bits
402 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
403
404 if (offset_bits)
405 inner = MIN (inner, (offset_bits & -offset_bits));
406 }
407 else if (TREE_CODE (offset) == MULT_EXPR
408 && host_integerp (TREE_OPERAND (offset, 1), 1))
409 {
410 /* Any overflow in calculating offset_factor won't change
411 the alignment. */
412 unsigned offset_factor
413 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
414 * BITS_PER_UNIT);
415
416 if (offset_factor)
417 inner = MIN (inner, (offset_factor & -offset_factor));
418 }
419 else
420 {
/* Completely unknown offset: only byte alignment survives.  */
421 inner = MIN (inner, BITS_PER_UNIT);
422 break;
423 }
424 offset = next_offset;
425 }
426
427 /* Alignment is innermost object alignment adjusted by the constant
428 and non-constant offset parts. */
429 align = MIN (align, inner);
/* Reduce bitpos modulo align so that address == *BITPOSP (mod align).  */
430 bitpos = bitpos & (align - 1);
431
432 *bitposp = bitpos;
433 return align;
434 }
435
436 /* Return the alignment in bits of EXP, an object. */
437
438 unsigned int
439 get_object_alignment (tree exp)
440 {
441 unsigned HOST_WIDE_INT bitpos = 0;
442 unsigned int align;
443
444 align = get_object_alignment_1 (exp, &bitpos);
445
446 /* align and bitpos now specify known low bits of the pointer.
447 ptr & (align - 1) == bitpos. */
448
449 if (bitpos != 0)
450 align = (bitpos & -bitpos);
451
452 return align;
453 }
454
455 /* Return the alignment in bits of EXP, a pointer valued expression.
456 The alignment returned is, by default, the alignment of the thing that
457 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
458
459 Otherwise, look at the expression to see if we can do better, i.e., if the
460 expression is actually pointing at an object whose alignment is tighter. */
461
462 unsigned int
463 get_pointer_alignment (tree exp)
464 {
465 STRIP_NOPS (exp);
466
467 if (TREE_CODE (exp) == ADDR_EXPR)
468 return get_object_alignment (TREE_OPERAND (exp, 0));
469 else if (TREE_CODE (exp) == SSA_NAME
470 && POINTER_TYPE_P (TREE_TYPE (exp)))
471 {
472 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
473 unsigned align;
/* Without points-to info we can only guarantee byte alignment.  */
474 if (!pi)
475 return BITS_PER_UNIT;
/* A nonzero misalignment caps the provable alignment at the lowest
   set bit of the misalign value.  */
476 if (pi->misalign != 0)
477 align = (pi->misalign & -pi->misalign);
478 else
479 align = pi->align;
480 return align * BITS_PER_UNIT;
481 }
482
483 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
484 }
485
486 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
487 way, because it could contain a zero byte in the middle.
488 TREE_STRING_LENGTH is the size of the character array, not the string.
489
490 ONLY_VALUE should be nonzero if the result is not going to be emitted
491 into the instruction stream and zero if it is going to be expanded.
492 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
493 is returned, otherwise NULL, since
494 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
495 evaluate the side-effects.
496
497 The value returned is of type `ssizetype'.
498
499 Unfortunately, string_constant can't access the values of const char
500 arrays with initializers, so neither can we do so here. */
501
502 tree
503 c_strlen (tree src, int only_value)
504 {
505 tree offset_node;
506 HOST_WIDE_INT offset;
507 int max;
508 const char *ptr;
509 location_t loc;
510
511 STRIP_NOPS (src);
/* A conditional whose arms have equal known lengths has that length.  */
512 if (TREE_CODE (src) == COND_EXPR
513 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
514 {
515 tree len1, len2;
516
517 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
518 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
/* NOTE(review): len1/len2 may be NULL_TREE when a branch length is
   unknown -- presumably tree_int_cst_equal handles NULL operands;
   confirm.  */
519 if (tree_int_cst_equal (len1, len2))
520 return len1;
521 }
522
523 if (TREE_CODE (src) == COMPOUND_EXPR
524 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
525 return c_strlen (TREE_OPERAND (src, 1), only_value);
526
527 loc = EXPR_LOC_OR_HERE (src);
528
529 src = string_constant (src, &offset_node);
530 if (src == 0)
531 return NULL_TREE;
532
/* MAX excludes the trailing NUL that build_string appends.  */
533 max = TREE_STRING_LENGTH (src) - 1;
534 ptr = TREE_STRING_POINTER (src);
535
536 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
537 {
538 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
539 compute the offset to the following null if we don't know where to
540 start searching for it. */
541 int i;
542
543 for (i = 0; i < max; i++)
544 if (ptr[i] == 0)
545 return NULL_TREE;
546
547 /* We don't know the starting offset, but we do know that the string
548 has no internal zero bytes. We can assume that the offset falls
549 within the bounds of the string; otherwise, the programmer deserves
550 what he gets. Subtract the offset from the length of the string,
551 and return that. This would perhaps not be valid if we were dealing
552 with named arrays in addition to literal string constants. */
553
554 return size_diffop_loc (loc, size_int (max), offset_node);
555 }
556
557 /* We have a known offset into the string. Start searching there for
558 a null character if we can represent it as a single HOST_WIDE_INT. */
559 if (offset_node == 0)
560 offset = 0;
561 else if (! host_integerp (offset_node, 0))
562 offset = -1;
563 else
564 offset = tree_low_cst (offset_node, 0);
565
566 /* If the offset is known to be out of bounds, warn, and call strlen at
567 runtime. */
568 if (offset < 0 || offset > max)
569 {
570 /* Suppress multiple warnings for propagated constant strings. */
571 if (! TREE_NO_WARNING (src))
572 {
573 warning_at (loc, 0, "offset outside bounds of constant string");
574 TREE_NO_WARNING (src) = 1;
575 }
576 return NULL_TREE;
577 }
578
579 /* Use strlen to search for the first zero byte. Since any strings
580 constructed with build_string will have nulls appended, we win even
581 if we get handed something like (char[4])"abcd".
582
583 Since OFFSET is our starting index into the string, no further
584 calculation is needed. */
585 return ssize_int (strlen (ptr + offset));
586 }
587
588 /* Return a char pointer for a C string if it is a string constant
589 or sum of string constant and integer constant.  Return 0 when SRC is
   not such an expression or when the constant offset lies outside the
   string.  */
590
591 static const char *
592 c_getstr (tree src)
593 {
594 tree offset_node;
595
596 src = string_constant (src, &offset_node);
597 if (src == 0)
598 return 0;
599
600 if (offset_node == 0)
601 return TREE_STRING_POINTER (src);
/* Reject non-constant offsets and offsets past the last array byte.  */
602 else if (!host_integerp (offset_node, 1)
603 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
604 return 0;
605
606 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
607 }
608
609 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
610 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
611
612 static rtx
613 c_readstr (const char *str, enum machine_mode mode)
614 {
/* C holds up to two host words worth of assembled bits.  */
615 HOST_WIDE_INT c[2];
616 HOST_WIDE_INT ch;
617 unsigned int i, j;
618
619 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
620
621 c[0] = 0;
622 c[1] = 0;
/* CH doubles as a "terminator seen" flag: once a NUL byte is read it
   stays zero, so bytes past the end of STR assemble as zero.  */
623 ch = 1;
624 for (i = 0; i < GET_MODE_SIZE (mode); i++)
625 {
/* Map byte index I to bit position J according to the target's word
   and byte endianness.  */
626 j = i;
627 if (WORDS_BIG_ENDIAN)
628 j = GET_MODE_SIZE (mode) - i - 1;
629 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
630 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
631 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
632 j *= BITS_PER_UNIT;
633 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
634
635 if (ch)
636 ch = (unsigned char) str[i];
637 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
638 }
639 return immed_double_const (c[0], c[1], mode);
640 }
641
642 /* Cast a target constant CST to target CHAR and if that value fits into
643 host char type, return zero and put that value into variable pointed to by
644 P. */
645
646 static int
647 target_char_cast (tree cst, char *p)
648 {
649 unsigned HOST_WIDE_INT val, hostval;
650
651 if (TREE_CODE (cst) != INTEGER_CST
652 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
653 return 1;
654
655 val = TREE_INT_CST_LOW (cst);
656 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
657 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
658
659 hostval = val;
660 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
661 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
662
663 if (val != hostval)
664 return 1;
665
666 *p = hostval;
667 return 0;
668 }
669
670 /* Similar to save_expr, but assumes that arbitrary code is not executed
671 in between the multiple evaluations. In particular, we assume that a
672 non-addressable local variable will not be modified. */
673
674 static tree
675 builtin_save_expr (tree exp)
676 {
/* SSA names and non-addressable locals/parameters cannot change between
   evaluations under the assumption above, so they need no SAVE_EXPR.  */
677 if (TREE_CODE (exp) == SSA_NAME
678 || (TREE_ADDRESSABLE (exp) == 0
679 && (TREE_CODE (exp) == PARM_DECL
680 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
681 return exp;
682
683 return save_expr (exp);
684 }
685
686 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
687 times to get the address of either a higher stack frame, or a return
688 address located within it (depending on FNDECL_CODE). */
689
690 static rtx
691 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
692 {
693 int i;
694
695 #ifdef INITIAL_FRAME_ADDRESS_RTX
696 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
697 #else
698 rtx tem;
699
700 /* For a zero count with __builtin_return_address, we don't care what
701 frame address we return, because target-specific definitions will
702 override us. Therefore frame pointer elimination is OK, and using
703 the soft frame pointer is OK.
704
705 For a nonzero count, or a zero count with __builtin_frame_address,
706 we require a stable offset from the current frame pointer to the
707 previous one, so we must use the hard frame pointer, and
708 we must disable frame pointer elimination. */
709 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
710 tem = frame_pointer_rtx;
711 else
712 {
713 tem = hard_frame_pointer_rtx;
714
715 /* Tell reload not to eliminate the frame pointer. */
716 crtl->accesses_prior_frames = 1;
717 }
718 #endif
719
720 /* Some machines need special handling before we can access
721 arbitrary frames. For example, on the SPARC, we must first flush
722 all register windows to the stack. */
723 #ifdef SETUP_FRAME_ADDRESSES
724 if (count > 0)
725 SETUP_FRAME_ADDRESSES ();
726 #endif
727
728 /* On the SPARC, the return address is not in the frame, it is in a
729 register. There is no way to access it off of the current frame
730 pointer, but it can be accessed off the previous frame pointer by
731 reading the value from the register window save area. */
732 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
733 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
734 count--;
735 #endif
736
737 /* Scan back COUNT frames to the specified frame. */
738 for (i = 0; i < count; i++)
739 {
740 /* Assume the dynamic chain pointer is in the word that the
741 frame address points to, unless otherwise specified. */
742 #ifdef DYNAMIC_CHAIN_ADDRESS
743 tem = DYNAMIC_CHAIN_ADDRESS (tem);
744 #endif
745 tem = memory_address (Pmode, tem);
746 tem = gen_frame_mem (Pmode, tem);
747 tem = copy_to_reg (tem);
748 }
749
750 /* For __builtin_frame_address, return what we've got. But, on
751 the SPARC for example, we may have to add a bias. */
752 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
753 #ifdef FRAME_ADDR_RTX
754 return FRAME_ADDR_RTX (tem);
755 #else
756 return tem;
757 #endif
758
759 /* For __builtin_return_address, get the return address from that frame. */
760 #ifdef RETURN_ADDR_RTX
761 tem = RETURN_ADDR_RTX (count, tem);
762 #else
/* Default: the return address sits one pointer-sized word past the
   frame address.  */
763 tem = memory_address (Pmode,
764 plus_constant (tem, GET_MODE_SIZE (Pmode)));
765 tem = gen_frame_mem (Pmode, tem);
766 #endif
767 return tem;
768 }
769
770 /* Alias set used for setjmp buffer.  -1 means "not yet allocated";
   users lazily create the set via new_alias_set on first use.  */
771 static alias_set_type setjmp_alias_set = -1;
772
773 /* Construct the leading half of a __builtin_setjmp call. Control will
774 return to RECEIVER_LABEL. This is also called directly by the SJLJ
775 exception handling code. */
776
777 void
778 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
779 {
780 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 rtx stack_save;
782 rtx mem;
783
784 if (setjmp_alias_set == -1)
785 setjmp_alias_set = new_alias_set ();
786
787 buf_addr = convert_memory_address (Pmode, buf_addr);
788
789 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
790
791 /* We store the frame pointer and the address of receiver_label in
792 the buffer and use the rest of it for the stack save area, which
793 is machine-dependent. */
794
795 mem = gen_rtx_MEM (Pmode, buf_addr);
796 set_mem_alias_set (mem, setjmp_alias_set);
797 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
798
799 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
801
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810
811 /* If there is further processing to do, do it. */
812 #ifdef HAVE_builtin_setjmp_setup
813 if (HAVE_builtin_setjmp_setup)
814 emit_insn (gen_builtin_setjmp_setup (buf_addr));
815 #endif
816
817 /* We have a nonlocal label. */
818 cfun->has_nonlocal_label = 1;
819 }
820
821 /* Construct the trailing part of a __builtin_setjmp call. This is
822 also called directly by the SJLJ exception handling code. */
823
824 void
825 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
826 {
827 rtx chain;
828
829 /* Clobber the FP when we get here, so we have to make sure it's
830 marked as used by this function. */
831 emit_use (hard_frame_pointer_rtx);
832
833 /* Mark the static chain as clobbered here so life information
834 doesn't get messed up for it. */
835 chain = targetm.calls.static_chain (current_function_decl, true);
836 if (chain && REG_P (chain))
837 emit_clobber (chain);
838
839 /* Now put in the code to restore the frame pointer, and argument
840 pointer, if needed. */
841 #ifdef HAVE_nonlocal_goto
842 if (! HAVE_nonlocal_goto)
843 #endif
844 {
845 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
846 /* This might change the hard frame pointer in ways that aren't
847 apparent to early optimization passes, so force a clobber. */
848 emit_clobber (hard_frame_pointer_rtx);
849 }
850
851 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
852 if (fixed_regs[ARG_POINTER_REGNUM])
853 {
854 #ifdef ELIMINABLE_REGS
/* If the argument pointer can be eliminated in favor of the hard
   frame pointer, no explicit restore is needed; otherwise reload it
   from its save slot.  */
855 size_t i;
856 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
857
858 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
859 if (elim_regs[i].from == ARG_POINTER_REGNUM
860 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
861 break;
862
863 if (i == ARRAY_SIZE (elim_regs))
864 #endif
865 {
866 /* Now restore our arg pointer from the address at which it
867 was saved in our stack frame. */
868 emit_move_insn (crtl->args.internal_arg_pointer,
869 copy_to_reg (get_arg_pointer_save_area ()));
870 }
871 }
872 #endif
873
/* Give the target a chance to emit receiver-specific fixup code.  */
874 #ifdef HAVE_builtin_setjmp_receiver
875 if (HAVE_builtin_setjmp_receiver)
876 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
877 else
878 #endif
879 #ifdef HAVE_nonlocal_goto_receiver
880 if (HAVE_nonlocal_goto_receiver)
881 emit_insn (gen_nonlocal_goto_receiver ());
882 else
883 #endif
884 { /* Nothing */ }
885
886 /* We must not allow the code we just generated to be reordered by
887 scheduling. Specifically, the update of the frame pointer must
888 happen immediately, not later. */
889 emit_insn (gen_blockage ());
890 }
891
892 /* __builtin_longjmp is passed a pointer to an array of five words (not
893 all will be used on all machines). It operates similarly to the C
894 library function of the same name, but is more efficient. Much of
895 the code below is copied from the handling of non-local gotos. */
896
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout written by builtin_setjmp is: frame pointer
	 at offset 0, receiver label at one pointer, and the stack save
	 area at two pointers.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* The code above must have emitted at least a jump or a call.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
979
980 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
981 and the address of the save area. */
982
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Two pointer arguments: the target label and the save area.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer at offset 0 and the stack
     save area one pointer later.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1060
1061 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1062 (not all will be used on all machines) that was passed to __builtin_setjmp.
1063 It updates the stack pointer in that block to correspond to the current
1064 stack pointer. */
1065
static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  /* The stack save slot lives at offset 2 * GET_MODE_SIZE (Pmode) in
     the setjmp buffer, matching the layout used by
     expand_builtin_longjmp above.  */
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  /* Overwrite the saved stack pointer with the current one.  */
  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
1078
1079 /* Expand a call to __builtin_prefetch. For a target that does not support
1080 data prefetch, evaluate the memory address argument in case it has side
1081 effects. */
1082
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* The first argument must be a pointer; further arguments are
     checked individually below.  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      /* If the pattern's predicates reject the operands, fall through
	 to the side-effect handling below.  */
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1158
1159 /* Get a MEM rtx for expression EXP which is the address of an operand
1160 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1161 the maximum length of the block of memory that might be accessed or
1162 NULL if unknown. */
1163
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Note: the address is expanded from ORIG_EXP (the unwrapped tree);
     EXP is only used below to derive memory attributes.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* OFF records a constant positive byte offset peeled from
     &obj + off, so the attributes can be adjusted accordingly.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers down to the innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET_KNOWN_P (mem))
	    offset = MEM_OFFSET (mem);

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs; OFFSET/LENGTH
	     track the access relative to the current field, or -1 when
	     unknown.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      if (offset >= 0)
		set_mem_offset (mem, offset);
	      else
		clear_mem_offset (mem);
	    }
	}
      set_mem_alias_set (mem, 0);
      clear_mem_size (mem);
    }

  return mem;
}
1296 \f
1297 /* Built-in functions to perform an untyped call and return. */
1298
1299 #define apply_args_mode \
1300 (this_target_builtins->x_apply_args_mode)
1301 #define apply_result_mode \
1302 (this_target_builtins->x_apply_result_mode)
1303
1304 /* Return the size required for the block returned by __builtin_apply_args,
1305 and initialize apply_args_mode. */
1306
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* Then one slot per argument-passing register, each aligned to
	 its mode's alignment; the mode chosen is recorded in
	 apply_args_mode for the save/restore code to use.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
1346
1347 /* Return the size required for the block returned by __builtin_apply,
1348 and initialize apply_result_mode. */
1349
static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* One slot per possible value-return register, each aligned to
	 its mode's alignment; the mode chosen is recorded in
	 apply_result_mode for the save/restore code to use.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1386
1387 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1388 /* Create a vector describing the result block RESULT. If SAVEP is true,
1389 the result block is used to save the values; otherwise it is used to
1390 restore the values. */
1391
static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  /* Build one SET per live result register: register -> memory when
     saving (SAVEP), memory -> register when restoring.  The layout in
     RESULT mirrors the one computed by apply_result_size.  */
  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When saving we reference the outgoing register; when
	   restoring, the incoming (callee-visible) register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
1416 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1417
1418 /* Save the state required to perform an untyped call with the same
1419 arguments as were passed to the current function. */
1420
static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     Offsets must match the layout computed by apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1478
1479 /* __builtin_apply_args returns block of memory allocated on
1480 the stack into which is stored the arg pointer, structure
1481 value address, static chain, and all the registers that might
1482 possibly be used in performing a function call. The code is
1483 moved to the start of the function so the incoming values are
1484 saved. */
1485
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1523
1524 /* Perform an untyped call and save the state required to perform an
1525 untyped return of whatever value was returned by the given function. */
1526
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  Calling apply_args_size here also
     (re)initializes apply_args_mode, which the loop below reads.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn(), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1690
1691 /* Perform an untyped return. */
1692
static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Calling apply_result_size (re)initializes apply_result_mode,
     which the restore loop below reads.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect a USE of each restored register so the values stay
	   live up to the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1740
1741 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1742
1743 static enum type_class
1744 type_to_class (tree type)
1745 {
1746 switch (TREE_CODE (type))
1747 {
1748 case VOID_TYPE: return void_type_class;
1749 case INTEGER_TYPE: return integer_type_class;
1750 case ENUMERAL_TYPE: return enumeral_type_class;
1751 case BOOLEAN_TYPE: return boolean_type_class;
1752 case POINTER_TYPE: return pointer_type_class;
1753 case REFERENCE_TYPE: return reference_type_class;
1754 case OFFSET_TYPE: return offset_type_class;
1755 case REAL_TYPE: return real_type_class;
1756 case COMPLEX_TYPE: return complex_type_class;
1757 case FUNCTION_TYPE: return function_type_class;
1758 case METHOD_TYPE: return method_type_class;
1759 case RECORD_TYPE: return record_type_class;
1760 case UNION_TYPE:
1761 case QUAL_UNION_TYPE: return union_type_class;
1762 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1763 ? string_type_class : array_type_class);
1764 case LANG_TYPE: return lang_type_class;
1765 default: return no_type_class;
1766 }
1767 }
1768
1769 /* Expand a call EXP to __builtin_classify_type. */
1770
1771 static rtx
1772 expand_builtin_classify_type (tree exp)
1773 {
1774 if (call_expr_nargs (exp))
1775 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1776 return GEN_INT (no_type_class);
1777 }
1778
1779 /* This helper macro, meant to be used in mathfn_built_in below,
1780 determines which among a set of three builtin math functions is
1781 appropriate for a given type mode. The `F' and `L' cases are
1782 automatically generated from the `double' case. */
1783 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1784 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1785 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1786 fcodel = BUILT_IN_MATHFN##L ; break;
1787 /* Similar to above, but appends _R after any F/L suffix. */
1788 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1789 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1790 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1791 fcodel = BUILT_IN_MATHFN##L_R ; break;
1792
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  /* FCODE/FCODEF/FCODEL are the double/float/long double variants of FN;
     they are filled in by the CASE_MATHFN* macros below.  FCODE2 is the
     variant selected for TYPE.  */
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not one of the recognized math builtins.  */
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE.  Only float, double and long double
     (main variants) are handled; other types have no counterpart.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  /* Honor -fno-builtin-* style restrictions when asked for the implicit
     declaration.  */
  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1911
1912 /* Like mathfn_built_in_1(), but always use the implicit array. */
1913
1914 tree
1915 mathfn_built_in (tree type, enum built_in_function fn)
1916 {
1917 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1918 }
1919
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ is
     true for every value except NaN, so the branch to LAB skips the
     errno-setting code in the common case.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.
     Requires the target to define TARGET_EDOM (and optionally
     GEN_ERRNO_RTX to locate errno).  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fallback: assume errno is a word-sized global named "errno".  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1962
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab and record whether the library
     function may set errno (in which case an inline expansion must
     emit the errno check itself).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets EDOM for negative arguments, so the check can
	 be omitted when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without -fmath-errno, or when NaNs don't exist in MODE, there is
     nothing for the errno check to detect.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2083
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     everything else takes two reals.  Deliberate fallthrough to the
     default break.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb via the optab is only valid for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
    /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Without -fmath-errno, or when NaNs don't exist in MODE, skip the
     errno check.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The inline expansion plus errno check is larger than a call;
     avoid it when optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list so re-expansion (e.g. by the
     errno fallback path) does not duplicate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2192
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* Only fma is handled here; any other builtin is a caller bug.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list so a possible fallback
     re-expansion does not duplicate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2265
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos instruction for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2369
2370 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2371 return an RTL instruction code that implements the functionality.
2372 If that isn't possible or available return CODE_FOR_nothing. */
2373
2374 static enum insn_code
2375 interclass_mathfn_icode (tree arg, tree fndecl)
2376 {
2377 bool errno_set = false;
2378 optab builtin_optab = 0;
2379 enum machine_mode mode;
2380
2381 switch (DECL_FUNCTION_CODE (fndecl))
2382 {
2383 CASE_FLT_FN (BUILT_IN_ILOGB):
2384 errno_set = true; builtin_optab = ilogb_optab; break;
2385 CASE_FLT_FN (BUILT_IN_ISINF):
2386 builtin_optab = isinf_optab; break;
2387 case BUILT_IN_ISNORMAL:
2388 case BUILT_IN_ISFINITE:
2389 CASE_FLT_FN (BUILT_IN_FINITE):
2390 case BUILT_IN_FINITED32:
2391 case BUILT_IN_FINITED64:
2392 case BUILT_IN_FINITED128:
2393 case BUILT_IN_ISINFD32:
2394 case BUILT_IN_ISINFD64:
2395 case BUILT_IN_ISINFD128:
2396 /* These builtins have no optabs (yet). */
2397 break;
2398 default:
2399 gcc_unreachable ();
2400 }
2401
2402 /* There's no easy way to detect the case we need to set EDOM. */
2403 if (flag_errno_math && errno_set)
2404 return CODE_FOR_nothing;
2405
2406 /* Optab mode depends on the mode of the input argument. */
2407 mode = TYPE_MODE (TREE_TYPE (arg));
2408
2409 if (builtin_optab)
2410 return optab_handler (builtin_optab, mode);
2411 return CODE_FOR_nothing;
2412 }
2413
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember the insn stream position so a failed attempt can be
	 rolled back, and the original argument so EXP can be restored.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard the emitted insns and undo the
	 argument stabilization before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2464
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, *sinp, *cosp): one real input, two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs that dereference SINP and COSP with conservative
     aliasing (may-alias pointer type) so the stores are not moved
     incorrectly.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; const0_rtx signals successful expansion.  */
  return const0_rtx;
}
2518
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      /* Emit a call to sincos, writing sin/cos into two stack
	 temporaries whose addresses are passed as tree operands.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Last resort: expand as cexp (0 + arg*i) and return the complex
	 result directly.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex whose real part is the
     cosine (op2) and imaginary part the sine (op1).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2627
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   The location LOC is attached to the resulting expression.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2646
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the plain floating-point rounding
     builtin to fall back on if the optab is unavailable.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the integral-rounding builtin code to the matching libm
	 function name (precision chosen by the F/L suffix).  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2782
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab for the rint or round family.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;

    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2855
2856 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2857 a normal call should be emitted rather than expanding the function
2858 in-line. EXP is the expression that is a call to the builtin
2859 function; if convenient, the result should be placed in TARGET. */
2860
2861 static rtx
2862 expand_builtin_powi (tree exp, rtx target)
2863 {
2864 tree arg0, arg1;
2865 rtx op0, op1;
2866 enum machine_mode mode;
2867 enum machine_mode mode2;
2868
2869 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2870 return NULL_RTX;
2871
2872 arg0 = CALL_EXPR_ARG (exp, 0);
2873 arg1 = CALL_EXPR_ARG (exp, 1);
2874 mode = TYPE_MODE (TREE_TYPE (exp));
2875
2876 /* Emit a libcall to libgcc. */
2877
2878 /* Mode of the 2nd argument must match that of an int. */
2879 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2880
2881 if (target == NULL_RTX)
2882 target = gen_reg_rtx (mode);
2883
2884 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2885 if (GET_MODE (op0) != mode)
2886 op0 = convert_to_mode (mode, op0, 0);
2887 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2888 if (GET_MODE (op1) != mode2)
2889 op1 = convert_to_mode (mode2, op1, 0);
2890
2891 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2892 target, LCT_CONST, mode, 2,
2893 op0, mode, op1, mode2);
2894
2895 return target;
2896 }
2897
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, and the caller should emit a normal call;
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until the target's strlen
	 pattern supports one.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in just before the
	 strlen insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3001
3002 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3003 bytes from constant string DATA + OFFSET and return it as target
3004 constant. */
3005
3006 static rtx
3007 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3008 enum machine_mode mode)
3009 {
3010 const char *str = (const char *) data;
3011
3012 gcc_assert (offset >= 0
3013 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3014 <= strlen (str) + 1));
3015
3016 return c_readstr (str + offset, mode);
3017 }
3018
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, and the caller should emit a normal call;
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pull value-profiling hints (likely alignment/size of the
	 block) from the statement being expanded, if any.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* If the block move didn't hand back the destination address,
	 recompute it from DEST_MEM (memcpy returns DEST).  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3098
3099 /* Expand a call EXP to the mempcpy builtin.
3100 Return NULL_RTX if we failed; the caller should emit a normal call,
3101 otherwise try to get the result in TARGET, if convenient (and in
3102 mode MODE if that's convenient). If ENDP is 0 return the
3103 destination pointer, if ENDP is 1 return the end pointer ala
3104 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3105 stpcpy. */
3106
3107 static rtx
3108 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3109 {
3110 if (!validate_arglist (exp,
3111 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3112 return NULL_RTX;
3113 else
3114 {
3115 tree dest = CALL_EXPR_ARG (exp, 0);
3116 tree src = CALL_EXPR_ARG (exp, 1);
3117 tree len = CALL_EXPR_ARG (exp, 2);
3118 return expand_builtin_mempcpy_args (dest, src, len,
3119 target, mode, /*endp=*/ 1);
3120 }
3121 }
3122
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which pointer store_by_pieces returns:
	     0 = DEST, 1 = end, 2 = end minus one.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, a constant-length move that is cheap enough is
	 expanded as a sequence of loads and stores.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3199
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The caller wants DEST back, so force its address into a
	 register up front; the pattern's output operand is unused.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3249
3250 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3251 NULL_RTX if we failed the caller should emit a normal call, otherwise
3252 try to get the result in TARGET, if convenient (and in mode MODE if that's
3253 convenient). */
3254
3255 static rtx
3256 expand_builtin_strcpy (tree exp, rtx target)
3257 {
3258 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3259 {
3260 tree dest = CALL_EXPR_ARG (exp, 0);
3261 tree src = CALL_EXPR_ARG (exp, 1);
3262 return expand_builtin_strcpy_args (dest, src, target);
3263 }
3264 return NULL_RTX;
3265 }
3266
3267 /* Helper function to do the actual work for expand_builtin_strcpy. The
3268 arguments to the builtin_strcpy call DEST and SRC are broken out
3269 so that this can also be called without constructing an actual CALL_EXPR.
3270 The other arguments and return value are the same as for
3271 expand_builtin_strcpy. */
3272
3273 static rtx
3274 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3275 {
3276 return expand_movstr (dest, src, target, /*endp=*/0);
3277 }
3278
3279 /* Expand a call EXP to the stpcpy builtin.
3280 Return NULL_RTX if we failed the caller should emit a normal call,
3281 otherwise try to get the result in TARGET, if convenient (and in
3282 mode MODE if that's convenient). */
3283
3284 static rtx
3285 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3286 {
3287 tree dst, src;
3288 location_t loc = EXPR_LOCATION (exp);
3289
3290 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3291 return NULL_RTX;
3292
3293 dst = CALL_EXPR_ARG (exp, 0);
3294 src = CALL_EXPR_ARG (exp, 1);
3295
3296 /* If return value is ignored, transform stpcpy into strcpy. */
3297 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3298 {
3299 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3300 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 }
3303 else
3304 {
3305 tree len, lenp1;
3306 rtx ret;
3307
3308 /* Ensure we get an actual string whose length can be evaluated at
3309 compile-time, not an expression containing a string. This is
3310 because the latter will potentially produce pessimized code
3311 when used to produce the return value. */
3312 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3313 return expand_movstr (dst, src, target, /*endp=*/2);
3314
3315 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3316 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3317 target, mode, /*endp=*/2);
3318
3319 if (ret)
3320 return ret;
3321
3322 if (TREE_CODE (len) == INTEGER_CST)
3323 {
3324 rtx len_rtx = expand_normal (len);
3325
3326 if (CONST_INT_P (len_rtx))
3327 {
3328 ret = expand_builtin_strcpy_args (dst, src, target);
3329
3330 if (ret)
3331 {
3332 if (! target)
3333 {
3334 if (mode != VOIDmode)
3335 target = gen_reg_rtx (mode);
3336 else
3337 target = gen_reg_rtx (GET_MODE (ret));
3338 }
3339 if (GET_MODE (target) != GET_MODE (ret))
3340 ret = gen_lowpart (GET_MODE (target), ret);
3341
3342 ret = plus_constant (ret, INTVAL (len_rtx));
3343 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3344 gcc_assert (ret);
3345
3346 return target;
3347 }
3348 }
3349 }
3350
3351 return expand_movstr (dst, src, target, /*endp=*/2);
3352 }
3353 }
3354
3355 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3356 bytes from constant string DATA + OFFSET and return it as target
3357 constant. */
3358
3359 rtx
3360 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3361 enum machine_mode mode)
3362 {
3363 const char *str = (const char *) data;
3364
3365 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3366 return const0_rtx;
3367
3368 return c_readstr (str + offset, mode);
3369 }
3370
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, i.e. the size of the string
	 including its terminating NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str returns zero chunks past the end
	     of the string, which provides the required zero padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3420
3421 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3422 bytes from constant string DATA + OFFSET and return it as target
3423 constant. */
3424
3425 rtx
3426 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3427 enum machine_mode mode)
3428 {
3429 const char *c = (const char *) data;
3430 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3431
3432 memset (p, *c, GET_MODE_SIZE (mode));
3433
3434 return c_readstr (p, mode);
3435 }
3436
3437 /* Callback routine for store_by_pieces. Return the RTL of a register
3438 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3439 char value given in the RTL register data. For example, if mode is
3440 4 bytes wide, return the RTL for 0x01010101*data. */
3441
3442 static rtx
3443 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3444 enum machine_mode mode)
3445 {
3446 rtx target, coeff;
3447 size_t size;
3448 char *p;
3449
3450 size = GET_MODE_SIZE (mode);
3451 if (size == 1)
3452 return (rtx) data;
3453
3454 p = XALLOCAVEC (char, size);
3455 memset (p, 1, size);
3456 coeff = c_readstr (p, mode);
3457
3458 target = convert_to_mode (mode, (rtx) data, 1);
3459 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3460 return force_reg (mode, target);
3461 }
3462
3463 /* Expand expression EXP, which is a call to the memset builtin. Return
3464 NULL_RTX if we failed the caller should emit a normal call, otherwise
3465 try to get the result in TARGET, if convenient (and in mode MODE if that's
3466 convenient). */
3467
3468 static rtx
3469 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3470 {
3471 if (!validate_arglist (exp,
3472 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3473 return NULL_RTX;
3474 else
3475 {
3476 tree dest = CALL_EXPR_ARG (exp, 0);
3477 tree val = CALL_EXPR_ARG (exp, 1);
3478 tree len = CALL_EXPR_ARG (exp, 2);
3479 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3480 }
3481 }
3482
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pull value-profiling hints (likely alignment/size) from the
     statement being expanded, if any.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: replicate it into a register and store
     that by pieces, or fall back to the setmem pattern.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Constant non-zero fill byte: store by pieces or via setmem.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; emit a call to the original builtin
     (memset or bzero) with the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3614
3615 /* Expand expression EXP, which is a call to the bzero builtin. Return
3616 NULL_RTX if we failed the caller should emit a normal call. */
3617
3618 static rtx
3619 expand_builtin_bzero (tree exp)
3620 {
3621 tree dest, size;
3622 location_t loc = EXPR_LOCATION (exp);
3623
3624 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3625 return NULL_RTX;
3626
3627 dest = CALL_EXPR_ARG (exp, 0);
3628 size = CALL_EXPR_ARG (exp, 1);
3629
3630 /* New argument list transforming bzero(ptr x, int y) to
3631 memset(ptr x, int 0, size_t y). This is done this way
3632 so that if it isn't expanded inline, we fallback to
3633 calling bzero instead of memset. */
3634
3635 return expand_builtin_memset_args (dest, integer_zero_node,
3636 fold_convert_loc (loc,
3637 size_type_node, size),
3638 const0_rtx, VOIDmode, exp);
3639 }
3640
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only if it is already a suitable pseudo register in the
       instruction's mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* If the pattern declined to generate insns, fall back to a
       library call to memcmp.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3732
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  The saved
	 expressions are reused below for the fall-back library call so the
	 arguments are not evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.
	     Only reuse TARGET if it is a pseudo register of the mode the
	     insn pattern produces.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Count the terminating nul in each known length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3873
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  MODE is the
   mode of the call's result.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Count the terminating nul in each known length.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Only reuse
	 TARGET if it is a pseudo register of the mode the insn pattern
	 produces.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  The saved
	 expressions are reused below for the fall-back library call.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3994
/* Expand a call to __builtin_saveregs.  Returns the RTX holding the
   result; subsequent calls within the same function reuse the value
   computed by the first call.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4031
/* Expand a call to __builtin_next_arg.  Returns the address just past
   the last named argument, computed as the incoming argument pointer
   plus the offset of the first anonymous argument.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4044
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the source location to use for
   any trees built here.  VALIST is the va_list expression; if
   NEEDS_LVALUE is nonzero the caller requires an lvalue result.
   Returns the stabilized expression.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the value through the stabilized pointer.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4094
/* The "standard" definition of va_list is void*.  This is the default
   for targetm.build_builtin_va_list.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4102
/* The "standard" abi va_list is va_list_type_node.  FNDECL is ignored
   in the default implementation.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4110
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE is (a reference to, or decayed form of)
   the target's va_list type, otherwise NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection, since the argument may be a
     reference or a pointer to the va_list object.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4145
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the RTX for
   the address of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4155
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   errors are diagnosed rather than propagated.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse; a nonzero return means an
     error was already reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start itself if it wants to.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4184
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the va_list expression, TYPE the requested argument type;
   setup statements go to PRE_P and update statements to POST_P.
   Returns a tree for the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments ADDR holds a pointer to the pointer;
     add the extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4281
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  ADDR is the address to dereference;
   returns the MEM_REF tree.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4294
4295 /* Return a dummy expression of type TYPE in order to keep going after an
4296 error. */
4297
4298 static tree
4299 dummy_object (tree type)
4300 {
4301 tree t = build_int_cst (build_pointer_type (type), 0);
4302 return build2 (MEM_REF, type, t, t);
4303 }
4304
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  EXPR_P points
   at the VA_ARG_EXPR; setup goes to PRE_P, follow-up to POST_P.
   Returns a gimplify status code; on success *EXPR_P is replaced by the
   expansion.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "so you should pass" hint only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4397
4398 /* Expand EXP, a call to __builtin_va_end. */
4399
4400 static rtx
4401 expand_builtin_va_end (tree exp)
4402 {
4403 tree valist = CALL_EXPR_ARG (exp, 0);
4404
4405 /* Evaluate for side effects, if needed. I hate macros that don't
4406 do that. */
4407 if (TREE_SIDE_EFFECTS (valist))
4408 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4409
4410 return const0_rtx;
4411 }
4412
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
      		  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4464
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes which of the two it is;
   EXP is the call.  Returns the address RTX, or const0_rtx after a
   diagnostic for invalid or unsupported arguments.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy the value into a register
	 if it is not already a register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4511
/* Expand EXP, a call to the alloca builtin (either __builtin_alloca or
   __builtin_alloca_with_align).  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  /* Emit normal call if marked not-inlineable.  */
  if (CALL_CANNOT_INLINE_P (exp))
    return NULL_RTX;

  /* __builtin_alloca_with_align takes an extra alignment argument.  */
  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
4552
/* Expand EXP, a call to a bswap builtin.  The mode is taken from the
   call's single argument.  Try to get the result in TARGET, if
   convenient; SUBTARGET may be used as the target for computing the
   operand.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  /* bswap is expected to be always expandable.  */
  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
4576
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   TARGET_MODE is the mode of the call's result; OP_OPTAB is the optab to
   expand with.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  Only reuse SUBTARGET if its mode matches
     the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4605
4606 /* Expand a call to __builtin_expect. We just return our argument
4607 as the builtin_expect semantic should've been already executed by
4608 tree branch prediction pass. */
4609
4610 static rtx
4611 expand_builtin_expect (tree exp, rtx target)
4612 {
4613 tree arg;
4614
4615 if (call_expr_nargs (exp) < 2)
4616 return const0_rtx;
4617 arg = CALL_EXPR_ARG (exp, 0);
4618
4619 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4620 /* When guessing was done, the hints should be already stripped away. */
4621 gcc_assert (!flag_guess_branch_prob
4622 || optimize == 0 || seen_error ());
4623 return target;
4624 }
4625
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  /* The alignment (and optional misalignment) arguments must have been
     discarded already; they may only remain if they are side-effect
     free, since we do not evaluate them here.  */
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
4642
/* Expand a call to __builtin_trap.  Use the target's trap instruction
   if it has one, otherwise fall back to calling abort; either way,
   emit a barrier since control does not continue.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4654
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4665
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Save the argument back into the call so that it is evaluated only
     once even if expand_abs re-reads it.  */
  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4688
4689 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4690 Return NULL is a normal call should be emitted rather than expanding the
4691 function inline. If convenient, the result should be placed in TARGET.
4692 SUBTARGET may be used as the target for computing the operand. */
4693
4694 static rtx
4695 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4696 {
4697 rtx op0, op1;
4698 tree arg;
4699
4700 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4701 return NULL_RTX;
4702
4703 arg = CALL_EXPR_ARG (exp, 0);
4704 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4705
4706 arg = CALL_EXPR_ARG (exp, 1);
4707 op1 = expand_normal (arg);
4708
4709 return expand_copysign (op0, op1, target);
4710 }
4711
4712 /* Create a new constant string literal and return a char* pointer to it.
4713 The STRING_CST value is the LEN characters at STR. */
4714 tree
4715 build_string_literal (int len, const char *str)
4716 {
4717 tree t, elem, index, type;
4718
4719 t = build_string (len, str);
4720 elem = build_type_variant (char_type_node, 1, 0);
4721 index = build_index_type (size_int (len - 1));
4722 type = build_array_type (elem, index);
4723 TREE_TYPE (t) = type;
4724 TREE_CONSTANT (t) = 1;
4725 TREE_READONLY (t) = 1;
4726 TREE_STATIC (t) = 1;
4727
4728 type = build_pointer_type (elem);
4729 t = build1 (ADDR_EXPR, type,
4730 build4 (ARRAY_REF, elem,
4731 t, integer_zero_node, NULL_TREE, NULL_TREE));
4732 return t;
4733 }
4734
/* Expand a call to __builtin___clear_cache.  Returns const0_rtx when
   fully handled here, or NULL_RTX to make the caller emit the normal
   library call.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* The insn could not be emitted; nothing more to do (the libgcc
     fallback must not be called, see above).  */
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4783
4784 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4785
4786 static rtx
4787 round_trampoline_addr (rtx tramp)
4788 {
4789 rtx temp, addend, mask;
4790
4791 /* If we don't need too much alignment, we'll have been guaranteed
4792 proper alignment by get_trampoline_type. */
4793 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4794 return tramp;
4795
4796 /* Round address up to desired boundary. */
4797 temp = gen_reg_rtx (Pmode);
4798 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4799 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4800
4801 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4802 temp, 0, OPTAB_LIB_WIDEN);
4803 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4804 temp, 0, OPTAB_LIB_WIDEN);
4805
4806 return tramp;
4807 }
4808
/* Expand a call to __builtin_init_trampoline: initialize the trampoline
   storage at argument 0 for nested function (argument 1) with static
   chain value (argument 2).  Returns const0_rtx, or NULL_RTX when a
   normal call should be emitted instead.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding the address changed it, re-derive the MEM with the
     stronger alignment and the trampoline's known size.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;

  warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
	      "trampoline generated for nested function %qD", t_func);

  return const0_rtx;
}
4860
4861 static rtx
4862 expand_builtin_adjust_trampoline (tree exp)
4863 {
4864 rtx tramp;
4865
4866 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4867 return NULL_RTX;
4868
4869 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4870 tramp = round_trampoline_addr (tramp);
4871 if (targetm.calls.trampoline_adjust_address)
4872 tramp = targetm.calls.trampoline_adjust_address (tramp);
4873
4874 return tramp;
4875 }
4876
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the float argument, RMODE the mode of the
     (integer) result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The expander failed; discard any insns it emitted and fall
	 through to the generic bit-extraction path.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it in the integer mode of the
	 same size, if one exists.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* The value is wider than a word: extract just the word that
	 contains the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
4987
4988 /* Expand fork or exec calls. TARGET is the desired target of the
4989 call. EXP is the call. FN is the
4990 identificator of the actual function. IGNORE is nonzero if the
4991 value is to be ignored. */
4992
4993 static rtx
4994 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4995 {
4996 tree id, decl;
4997 tree call;
4998
4999 /* If we are not profiling, just call the function. */
5000 if (!profile_arc_flag)
5001 return NULL_RTX;
5002
5003 /* Otherwise call the wrapper. This should be equivalent for the rest of
5004 compiler, so the code does not diverge, and the wrapper may run the
5005 code necessary for keeping the profiling sane. */
5006
5007 switch (DECL_FUNCTION_CODE (fn))
5008 {
5009 case BUILT_IN_FORK:
5010 id = get_identifier ("__gcov_fork");
5011 break;
5012
5013 case BUILT_IN_EXECL:
5014 id = get_identifier ("__gcov_execl");
5015 break;
5016
5017 case BUILT_IN_EXECV:
5018 id = get_identifier ("__gcov_execv");
5019 break;
5020
5021 case BUILT_IN_EXECLP:
5022 id = get_identifier ("__gcov_execlp");
5023 break;
5024
5025 case BUILT_IN_EXECLE:
5026 id = get_identifier ("__gcov_execle");
5027 break;
5028
5029 case BUILT_IN_EXECVP:
5030 id = get_identifier ("__gcov_execvp");
5031 break;
5032
5033 case BUILT_IN_EXECVE:
5034 id = get_identifier ("__gcov_execve");
5035 break;
5036
5037 default:
5038 gcc_unreachable ();
5039 }
5040
5041 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5042 FUNCTION_DECL, id, TREE_TYPE (fn));
5043 DECL_EXTERNAL (decl) = 1;
5044 TREE_PUBLIC (decl) = 1;
5045 DECL_ARTIFICIAL (decl) = 1;
5046 TREE_NOTHROW (decl) = 1;
5047 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5048 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5049 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5050 return expand_call (call, target, ignore);
5051 }
5052
5053
5054 \f
5055 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5056 the pointer in these functions is void*, the tree optimizers may remove
5057 casts. The mode computed in expand_builtin isn't reliable either, due
5058 to __sync_bool_compare_and_swap.
5059
5060 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5061 group of builtins. This gives us log2 of the mode size. */
5062
5063 static inline enum machine_mode
5064 get_builtin_sync_mode (int fcode_diff)
5065 {
5066 /* The size is not negotiable, so ask not to get BLKmode in return
5067 if the target indicates that a smaller size would be better. */
5068 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5069 }
5070
5071 /* Expand the memory expression LOC and return the appropriate memory operand
5072 for the builtin_sync operations. */
5073
5074 static rtx
5075 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5076 {
5077 rtx addr, mem;
5078
5079 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5080 addr = convert_memory_address (Pmode, addr);
5081
5082 /* Note that we explicitly do not want any alias information for this
5083 memory, so that we kill all other live memories. Otherwise we don't
5084 satisfy the full barrier semantics of the intrinsic. */
5085 mem = validize_mem (gen_rtx_MEM (mode, addr));
5086
5087 /* The alignment needs to be at least according to that of the mode. */
5088 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5089 get_pointer_alignment (loc)));
5090 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5091 MEM_VOLATILE_P (mem) = 1;
5092
5093 return mem;
5094 }
5095
5096 /* Make sure an argument is in the right mode.
5097 EXP is the tree argument.
5098 MODE is the mode it should be in. */
5099
5100 static rtx
5101 expand_expr_force_mode (tree exp, enum machine_mode mode)
5102 {
5103 rtx val;
5104 enum machine_mode old_mode;
5105
5106 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5107 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5108 of CONST_INTs, where we know the old_mode only from the call argument. */
5109
5110 old_mode = GET_MODE (val);
5111 if (old_mode == VOIDmode)
5112 old_mode = TYPE_MODE (TREE_TYPE (exp));
5113 val = convert_modes (mode, old_mode, val, 1);
5114 return val;
5115 }
5116
5117
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed semantics in GCC 4.4; when requested via
     -Wsync-nand, remind the user once per flavor.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Each of the two notes is emitted at most once per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The __sync builtins always imply full sequential consistency.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
5182
5183 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5184 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5185 true if this is the boolean form. TARGET is a place for us to store the
5186 results; this is NOT optional if IS_BOOL is true. */
5187
5188 static rtx
5189 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5190 bool is_bool, rtx target)
5191 {
5192 rtx old_val, new_val, mem;
5193
5194 /* Expand the operands. */
5195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5196 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5197 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5198
5199 if (!expand_atomic_compare_and_swap ((is_bool ? &target : NULL),
5200 (is_bool ? NULL : &target),
5201 mem, old_val, new_val, false,
5202 MEMMODEL_SEQ_CST, MEMMODEL_SEQ_CST))
5203 return NULL_RTX;
5204
5205 return target;
5206 }
5207
5208 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5209 general form is actually an atomic exchange, and some targets only
5210 support a reduced form with the second argument being a constant 1.
5211 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5212 the results. */
5213
5214 static rtx
5215 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5216 rtx target)
5217 {
5218 rtx val, mem;
5219
5220 /* Expand the operands. */
5221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5222 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5223
5224 return expand_atomic_exchange (target, mem, val, MEMMODEL_ACQUIRE, true);
5225 }
5226
5227 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5228
5229 static void
5230 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5231 {
5232 rtx mem;
5233
5234 /* Expand the operands. */
5235 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5236
5237 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5238 }
5239
5240 /* Given an integer representing an ``enum memmodel'', verify its
5241 correctness and return the memory model enum. */
5242
5243 static enum memmodel
5244 get_memmodel (tree exp)
5245 {
5246 rtx op;
5247
5248 /* If the parameter is not a constant, it's a run time value so we'll just
5249 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5250 if (TREE_CODE (exp) != INTEGER_CST)
5251 return MEMMODEL_SEQ_CST;
5252
5253 op = expand_normal (exp);
5254 if (INTVAL (op) < 0 || INTVAL (op) >= MEMMODEL_LAST)
5255 {
5256 warning (OPT_Winvalid_memory_model,
5257 "invalid memory model argument to builtin");
5258 return MEMMODEL_SEQ_CST;
5259 }
5260 return (enum memmodel) INTVAL (op);
5261 }
5262
5263 /* Expand the __atomic_exchange intrinsic:
5264 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5267
5268 static rtx
5269 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5270 {
5271 rtx val, mem;
5272 enum memmodel model;
5273
5274 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5275 if (model == MEMMODEL_CONSUME)
5276 {
5277 error ("invalid memory model for %<__atomic_exchange%>");
5278 return NULL_RTX;
5279 }
5280
5281 if (!flag_inline_atomics)
5282 return NULL_RTX;
5283
5284 /* Expand the operands. */
5285 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5286 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5287
5288 return expand_atomic_exchange (target, mem, val, model, false);
5289 }
5290
5291 /* Expand the __atomic_compare_exchange intrinsic:
5292 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5293 TYPE desired, BOOL weak,
5294 enum memmodel success,
5295 enum memmodel failure)
5296 EXP is the CALL_EXPR.
5297 TARGET is an optional place for us to store the results. */
5298
5299 static rtx
5300 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5301 rtx target)
5302 {
5303 rtx expect, desired, mem, oldval;
5304 enum memmodel success, failure;
5305 tree weak;
5306 bool is_weak;
5307
5308 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5309 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5310
5311 if (failure == MEMMODEL_RELEASE || failure == MEMMODEL_ACQ_REL)
5312 {
5313 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5314 return NULL_RTX;
5315 }
5316
5317 if (failure > success)
5318 {
5319 error ("failure memory model cannot be stronger than success "
5320 "memory model for %<__atomic_compare_exchange%>");
5321 return NULL_RTX;
5322 }
5323
5324 if (!flag_inline_atomics)
5325 return NULL_RTX;
5326
5327 /* Expand the operands. */
5328 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5329
5330 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5331 expect = convert_memory_address (Pmode, expect);
5332 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5333
5334 weak = CALL_EXPR_ARG (exp, 3);
5335 is_weak = false;
5336 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5337 is_weak = true;
5338
5339 oldval = copy_to_reg (gen_rtx_MEM (mode, expect));
5340
5341 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, oldval,
5342 desired, is_weak, success, failure))
5343 return NULL_RTX;
5344
5345 emit_move_insn (gen_rtx_MEM (mode, expect), oldval);
5346 return target;
5347 }
5348
5349 /* Expand the __atomic_load intrinsic:
5350 TYPE __atomic_load (TYPE *object, enum memmodel)
5351 EXP is the CALL_EXPR.
5352 TARGET is an optional place for us to store the results. */
5353
5354 static rtx
5355 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5356 {
5357 rtx mem;
5358 enum memmodel model;
5359
5360 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5361 if (model == MEMMODEL_RELEASE
5362 || model == MEMMODEL_ACQ_REL)
5363 {
5364 error ("invalid memory model for %<__atomic_load%>");
5365 return NULL_RTX;
5366 }
5367
5368 if (!flag_inline_atomics)
5369 return NULL_RTX;
5370
5371 /* Expand the operand. */
5372 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5373
5374 return expand_atomic_load (target, mem, model);
5375 }
5376
5377
5378 /* Expand the __atomic_store intrinsic:
5379 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5380 EXP is the CALL_EXPR.
5381 TARGET is an optional place for us to store the results. */
5382
5383 static rtx
5384 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5385 {
5386 rtx mem, val;
5387 enum memmodel model;
5388
5389 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5390 if (model != MEMMODEL_RELAXED
5391 && model != MEMMODEL_SEQ_CST
5392 && model != MEMMODEL_RELEASE)
5393 {
5394 error ("invalid memory model for %<__atomic_store%>");
5395 return NULL_RTX;
5396 }
5397
5398 if (!flag_inline_atomics)
5399 return NULL_RTX;
5400
5401 /* Expand the operands. */
5402 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5403 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5404
5405 return expand_atomic_store (mem, val, model, false);
5406 }
5407
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR; as elsewhere,
   NOT actually means NAND.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  /* Temporarily redirect the callee of EXP to EXT_CALL's decl.  */
  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: the corrected result is ~(ret & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
5476
5477 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5478 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5479
5480 static tree
5481 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5482 {
5483 int size;
5484 enum machine_mode mode;
5485 unsigned int mode_align, type_align;
5486
5487 if (TREE_CODE (arg0) != INTEGER_CST)
5488 return NULL_TREE;
5489
5490 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5491 mode = mode_for_size (size, MODE_INT, 0);
5492 mode_align = GET_MODE_ALIGNMENT (mode);
5493
5494 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5495 type_align = mode_align;
5496 else
5497 {
5498 tree ttype = TREE_TYPE (arg1);
5499
5500 /* This function is usually invoked and folded immediately by the front
5501 end before anything else has a chance to look at it. The pointer
5502 parameter at this point is usually cast to a void *, so check for that
5503 and look past the cast. */
5504 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5505 && VOID_TYPE_P (TREE_TYPE (ttype)))
5506 arg1 = TREE_OPERAND (arg1, 0);
5507
5508 ttype = TREE_TYPE (arg1);
5509 gcc_assert (POINTER_TYPE_P (ttype));
5510
5511 /* Get the underlying type of the object. */
5512 ttype = TREE_TYPE (ttype);
5513 type_align = TYPE_ALIGN (ttype);
5514 }
5515
5516 /* If the object has smaller alignment, the the lock free routines cannot
5517 be used. */
5518 if (type_align < mode_align)
5519 return integer_zero_node;
5520
5521 /* Check if a compare_and_swap pattern exists for the mode which represents
5522 the required size. The pattern is not allowed to fail, so the existence
5523 of the pattern indicates support is present. */
5524 if (can_compare_and_swap_p (mode, true))
5525 return integer_one_node;
5526 else
5527 return integer_zero_node;
5528 }
5529
5530 /* Return true if the parameters to call EXP represent an object which will
5531 always generate lock free instructions. The first argument represents the
5532 size of the object, and the second parameter is a pointer to the object
5533 itself. If NULL is passed for the object, then the result is based on
5534 typical alignment for an object of the specified size. Otherwise return
5535 false. */
5536
5537 static rtx
5538 expand_builtin_atomic_always_lock_free (tree exp)
5539 {
5540 tree size;
5541 tree arg0 = CALL_EXPR_ARG (exp, 0);
5542 tree arg1 = CALL_EXPR_ARG (exp, 1);
5543
5544 if (TREE_CODE (arg0) != INTEGER_CST)
5545 {
5546 error ("non-constant argument 1 to __atomic_always_lock_free");
5547 return const0_rtx;
5548 }
5549
5550 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5551 if (size == integer_one_node)
5552 return const1_rtx;
5553 return const0_rtx;
5554 }
5555
5556 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5557 is lock free on this architecture. */
5558
5559 static tree
5560 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5561 {
5562 if (!flag_inline_atomics)
5563 return NULL_TREE;
5564
5565 /* If it isn't always lock free, don't generate a result. */
5566 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == integer_one_node)
5567 return integer_one_node;
5568
5569 return NULL_TREE;
5570 }
5571
5572 /* Return true if the parameters to call EXP represent an object which will
5573 always generate lock free instructions. The first argument represents the
5574 size of the object, and the second parameter is a pointer to the object
5575 itself. If NULL is passed for the object, then the result is based on
5576 typical alignment for an object of the specified size. Otherwise return
5577 NULL*/
5578
5579 static rtx
5580 expand_builtin_atomic_is_lock_free (tree exp)
5581 {
5582 tree size;
5583 tree arg0 = CALL_EXPR_ARG (exp, 0);
5584 tree arg1 = CALL_EXPR_ARG (exp, 1);
5585
5586 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5587 {
5588 error ("non-integer argument 1 to __atomic_is_lock_free");
5589 return NULL_RTX;
5590 }
5591
5592 if (!flag_inline_atomics)
5593 return NULL_RTX;
5594
5595 /* If the value is known at compile time, return the RTX for it. */
5596 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5597 if (size == integer_one_node)
5598 return const1_rtx;
5599
5600 return NULL_RTX;
5601 }
5602
/* This routine will either emit the mem_thread_fence pattern or issue a
   sync_synchronize to generate a fence for memory model MEMMODEL.  */

/* Fallback definitions when the target provides no mem_thread_fence
   pattern; the generator must never be reached in that case.  */
#ifndef HAVE_mem_thread_fence
# define HAVE_mem_thread_fence 0
# define gen_mem_thread_fence(x) (gcc_unreachable (), NULL_RTX)
#endif

void
expand_builtin_mem_thread_fence (enum memmodel model)
{
  if (HAVE_mem_thread_fence)
    emit_insn (gen_mem_thread_fence (GEN_INT (model)));
  else if (model != MEMMODEL_RELAXED)
    /* A relaxed fence is a no-op; anything stronger falls back to a
       full barrier.  */
    expand_builtin_sync_synchronize ();
}
5619
5620 /* Expand the __atomic_thread_fence intrinsic:
5621 void __atomic_thread_fence (enum memmodel)
5622 EXP is the CALL_EXPR. */
5623
5624 static void
5625 expand_builtin_atomic_thread_fence (tree exp)
5626 {
5627 enum memmodel model;
5628
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5630 expand_builtin_mem_thread_fence (model);
5631 }
5632
/* This routine will either emit the mem_signal_fence pattern or issue a
   sync_synchronize to generate a fence for memory model MEMMODEL.  */

/* Fallback definitions when the target provides no mem_signal_fence
   pattern; the generator must never be reached in that case.  */
#ifndef HAVE_mem_signal_fence
# define HAVE_mem_signal_fence 0
# define gen_mem_signal_fence(x) (gcc_unreachable (), NULL_RTX)
#endif

static void
expand_builtin_mem_signal_fence (enum memmodel model)
{
  if (HAVE_mem_signal_fence)
    emit_insn (gen_mem_signal_fence (GEN_INT (model)));
  else if (model != MEMMODEL_RELAXED)
    {
      rtx asm_op, clob;

      /* By default targets are coherent between a thread and the signal
	 handler running on the same thread.  Thus this really becomes a
	 compiler barrier, in that stores must not be sunk past
	 (or raised above) a given point.  */

      /* Generate asm volatile("" : : : "memory") as the memory barrier.  */
      asm_op = gen_rtx_ASM_OPERANDS (VOIDmode, empty_string, empty_string, 0,
				     rtvec_alloc (0), rtvec_alloc (0),
				     rtvec_alloc (0), UNKNOWN_LOCATION);
      MEM_VOLATILE_P (asm_op) = 1;

      /* Clobber all of memory (a SCRATCH wrapped in a BLKmode MEM) so
	 the asm blocks memory motion across it.  */
      clob = gen_rtx_SCRATCH (VOIDmode);
      clob = gen_rtx_MEM (BLKmode, clob);
      clob = gen_rtx_CLOBBER (VOIDmode, clob);

      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, asm_op, clob)));
    }
}
5668
5669 /* Expand the __atomic_signal_fence intrinsic:
5670 void __atomic_signal_fence (enum memmodel)
5671 EXP is the CALL_EXPR. */
5672
5673 static void
5674 expand_builtin_atomic_signal_fence (tree exp)
5675 {
5676 enum memmodel model;
5677
5678 model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5679 expand_builtin_mem_signal_fence (model);
5680 }
5681
/* Expand the __sync_synchronize intrinsic: emit a full memory barrier.  */

static void
expand_builtin_sync_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  /* Prefer the target's dedicated memory barrier instruction.  */
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  /* Otherwise use the target's synchronization library routine, if it
     provides one.  */
  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5713
5714 \f
5715 /* Expand an expression EXP that calls a built-in function,
5716 with result going to TARGET if that's convenient
5717 (and in mode MODE if that's convenient).
5718 SUBTARGET may be used as the target for computing one of EXP's operands.
5719 IGNORE is nonzero if the value is to be ignored. */
5720
rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  /* Machine-dependent builtins are expanded entirely by the target.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  Alloca and free are excluded because they have no
     library equivalent with the same semantics.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* Dispatch on the builtin's function code.  Each case either returns
     the expanded RTL, or breaks to fall through to a normal library
     call at the bottom of the function.  */
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through to the sqrt-class handling below.  */
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through: with unsafe math, ilogb is handled like the
	 other interclass math functions.  */
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through: with unsafe math these join the two-argument
	 math-function expansion below.  */

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      /* At RTL-expansion time nothing more can become constant, so
	 any remaining __builtin_constant_p is false.  */
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (exp, target, subtarget);

      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
    case BUILT_IN_CLRSBIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	  and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

      /* __sync_* builtins: the trailing _1/_2/_4/_8/_16 encodes the
	 operand size in bytes; the subtraction below recovers the index
	 used to pick the machine mode.  */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      /* The boolean variant needs a register target in the boolean
	 mode before MODE is repurposed below to hold the sync mode.  */
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
      target = expand_builtin_atomic_compare_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      /* A store returns no value; a non-null TARGET only signals that
	 the expansion succeeded.  */
      if (target)
	return const0_rtx;
      break;

      /* The __atomic_*_FETCH variants pass a library-call fallback
	 (the corresponding FETCH_* code) so the expander can emit a
	 compensating operation when only the fetch-op insn exists.  */
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

      /* For the remaining _chk builtins we only emit diagnostics here;
	 the actual call is expanded normally below.  */
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6763
6764 /* Determine whether a tree node represents a call to a built-in
6765 function. If the tree T is a call to a built-in function with
6766 the right number of arguments of the appropriate types, return
6767 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6768 Otherwise the return value is END_BUILTINS. */
6769
6770 enum built_in_function
6771 builtin_mathfn_code (const_tree t)
6772 {
6773 const_tree fndecl, arg, parmlist;
6774 const_tree argtype, parmtype;
6775 const_call_expr_arg_iterator iter;
6776
6777 if (TREE_CODE (t) != CALL_EXPR
6778 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6779 return END_BUILTINS;
6780
6781 fndecl = get_callee_fndecl (t);
6782 if (fndecl == NULL_TREE
6783 || TREE_CODE (fndecl) != FUNCTION_DECL
6784 || ! DECL_BUILT_IN (fndecl)
6785 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6786 return END_BUILTINS;
6787
6788 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6789 init_const_call_expr_arg_iterator (t, &iter);
6790 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6791 {
6792 /* If a function doesn't take a variable number of arguments,
6793 the last element in the list will have type `void'. */
6794 parmtype = TREE_VALUE (parmlist);
6795 if (VOID_TYPE_P (parmtype))
6796 {
6797 if (more_const_call_expr_args_p (&iter))
6798 return END_BUILTINS;
6799 return DECL_FUNCTION_CODE (fndecl);
6800 }
6801
6802 if (! more_const_call_expr_args_p (&iter))
6803 return END_BUILTINS;
6804
6805 arg = next_const_call_expr_arg (&iter);
6806 argtype = TREE_TYPE (arg);
6807
6808 if (SCALAR_FLOAT_TYPE_P (parmtype))
6809 {
6810 if (! SCALAR_FLOAT_TYPE_P (argtype))
6811 return END_BUILTINS;
6812 }
6813 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6814 {
6815 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6816 return END_BUILTINS;
6817 }
6818 else if (POINTER_TYPE_P (parmtype))
6819 {
6820 if (! POINTER_TYPE_P (argtype))
6821 return END_BUILTINS;
6822 }
6823 else if (INTEGRAL_TYPE_P (parmtype))
6824 {
6825 if (! INTEGRAL_TYPE_P (argtype))
6826 return END_BUILTINS;
6827 }
6828 else
6829 return END_BUILTINS;
6830 }
6831
6832 /* Variable-length argument list. */
6833 return DECL_FUNCTION_CODE (fndecl);
6834 }
6835
6836 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6837 evaluate to a constant. */
6838
6839 static tree
6840 fold_builtin_constant_p (tree arg)
6841 {
6842 /* We return 1 for a numeric type that's known to be a constant
6843 value at compile-time or for an aggregate type that's a
6844 literal constant. */
6845 STRIP_NOPS (arg);
6846
6847 /* If we know this is a constant, emit the constant of one. */
6848 if (CONSTANT_CLASS_P (arg)
6849 || (TREE_CODE (arg) == CONSTRUCTOR
6850 && TREE_CONSTANT (arg)))
6851 return integer_one_node;
6852 if (TREE_CODE (arg) == ADDR_EXPR)
6853 {
6854 tree op = TREE_OPERAND (arg, 0);
6855 if (TREE_CODE (op) == STRING_CST
6856 || (TREE_CODE (op) == ARRAY_REF
6857 && integer_zerop (TREE_OPERAND (op, 1))
6858 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6859 return integer_one_node;
6860 }
6861
6862 /* If this expression has side effects, show we don't know it to be a
6863 constant. Likewise if it's a pointer or aggregate type since in
6864 those case we only want literals, since those are only optimized
6865 when generating RTL, not later.
6866 And finally, if we are compiling an initializer, not code, we
6867 need to return a definite result now; there's not going to be any
6868 more optimization done. */
6869 if (TREE_SIDE_EFFECTS (arg)
6870 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6871 || POINTER_TYPE_P (TREE_TYPE (arg))
6872 || cfun == 0
6873 || folding_initializer)
6874 return integer_zero_node;
6875
6876 return NULL_TREE;
6877 }
6878
6879 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6880 return it as a truthvalue. */
6881
6882 static tree
6883 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6884 {
6885 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6886
6887 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6888 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6889 ret_type = TREE_TYPE (TREE_TYPE (fn));
6890 pred_type = TREE_VALUE (arg_types);
6891 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6892
6893 pred = fold_convert_loc (loc, pred_type, pred);
6894 expected = fold_convert_loc (loc, expected_type, expected);
6895 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6896
6897 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6898 build_int_cst (ret_type, 0));
6899 }
6900
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested __builtin_expect: return ARG0 unchanged, keeping the
     inner expectation.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* For a && / || operand, push the expectation down to each operand
     as its own __builtin_expect predicate.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Peel component and array references to reach the base decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a compile-time constant;
	 it may resolve to zero at link time.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6973
6974 /* Fold a call to __builtin_classify_type with argument ARG. */
6975
6976 static tree
6977 fold_builtin_classify_type (tree arg)
6978 {
6979 if (arg == 0)
6980 return build_int_cst (integer_type_node, no_type_class);
6981
6982 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6983 }
6984
6985 /* Fold a call to __builtin_strlen with argument ARG. */
6986
6987 static tree
6988 fold_builtin_strlen (location_t loc, tree type, tree arg)
6989 {
6990 if (!validate_arg (arg, POINTER_TYPE))
6991 return NULL_TREE;
6992 else
6993 {
6994 tree len = c_strlen (arg, 0);
6995
6996 if (len)
6997 return fold_convert_loc (loc, type, len);
6998
6999 return NULL_TREE;
7000 }
7001 }
7002
7003 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7004
7005 static tree
7006 fold_builtin_inf (location_t loc, tree type, int warn)
7007 {
7008 REAL_VALUE_TYPE real;
7009
7010 /* __builtin_inff is intended to be usable to define INFINITY on all
7011 targets. If an infinity is not available, INFINITY expands "to a
7012 positive constant of type float that overflows at translation
7013 time", footnote "In this case, using INFINITY will violate the
7014 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7015 Thus we pedwarn to ensure this constraint violation is
7016 diagnosed. */
7017 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7018 pedwarn (loc, 0, "target format does not support infinity");
7019
7020 real_inf (&real);
7021 return build_real (type, real);
7022 }
7023
7024 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7025
7026 static tree
7027 fold_builtin_nan (tree arg, tree type, int quiet)
7028 {
7029 REAL_VALUE_TYPE real;
7030 const char *str;
7031
7032 if (!validate_arg (arg, POINTER_TYPE))
7033 return NULL_TREE;
7034 str = c_getstr (arg);
7035 if (!str)
7036 return NULL_TREE;
7037
7038 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7039 return NULL_TREE;
7040
7041 return build_real (type, real);
7042 }
7043
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* An int-to-float conversion is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* For these the value is operand 1.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Arithmetic on integers yields integers (Inf/NaN allowed, see
       above).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Both arms of the conditional must be integral.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    /* A widening from an integer type, or from a real that is itself
       integral, preserves integrality.  */
    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding functions always produce integral values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax of two integral values is integral.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Anything unrecognized: conservatively not integral.  */
  return false;
}
7115
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent: e.g. floor(floor(x))
     is just floor(x).  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* If ARG is a widened narrower float, call the narrower
	 function variant and widen the result instead:
	 floor((double)f) -> (double)floorf(f).  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
7154
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding: just
     truncate to the integer result type.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the FP argument: lround((double)f) -> lroundf(f) when
	 a narrower builtin variant exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the `long' result back to the original `int'
	     return type of FNDECL.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the `long' result back to FNDECL's `long long'
	     return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7260
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  /* Only handle a complex argument whose element type is real.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(x + yi) == hypot(x, y).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  /* sqrt(2) truncated to the precision of TYPE.  */
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* With unsafe math, expand cabs(z) to sqrt(x*x + y*y).
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save the operands so any side effects are evaluated
	     only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7338
7339 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7340 complex tree type of the result. If NEG is true, the imaginary
7341 zero is negative. */
7342
7343 static tree
7344 build_complex_cproj (tree type, bool neg)
7345 {
7346 REAL_VALUE_TYPE rinf, rzero = dconst0;
7347
7348 real_inf (&rinf);
7349 rzero.sign = neg;
7350 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7351 build_real (TREE_TYPE (type), rzero));
7352 }
7353
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* If either part is infinite, project to (inf + 0i) with the
	 sign of the imaginary part; otherwise cproj is the identity.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7409
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The
     &dconst0 lower bound rejects negative arguments.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is known nonnegative, since pow requires a
	 nonnegative base for fractional exponents.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7483
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  /* 1/3 truncated to TYPE's precision.  */
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get 1/6 by decrementing the binary
		 exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7574
7575 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7576 TYPE is the type of the return value. Return NULL_TREE if no
7577 simplification can be made. */
7578
7579 static tree
7580 fold_builtin_cos (location_t loc,
7581 tree arg, tree type, tree fndecl)
7582 {
7583 tree res, narg;
7584
7585 if (!validate_arg (arg, REAL_TYPE))
7586 return NULL_TREE;
7587
7588 /* Calculate the result when the argument is a constant. */
7589 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7590 return res;
7591
7592 /* Optimize cos(-x) into cos (x). */
7593 if ((narg = fold_strip_sign_ops (arg)))
7594 return build_call_expr_loc (loc, fndecl, 1, narg);
7595
7596 return NULL_TREE;
7597 }
7598
7599 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7601
7602 static tree
7603 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7604 {
7605 if (validate_arg (arg, REAL_TYPE))
7606 {
7607 tree res, narg;
7608
7609 /* Calculate the result when the argument is a constant. */
7610 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7611 return res;
7612
7613 /* Optimize cosh(-x) into cosh (x). */
7614 if ((narg = fold_strip_sign_ops (arg)))
7615 return build_call_expr_loc (loc, fndecl, 1, narg);
7616 }
7617
7618 return NULL_TREE;
7619 }
7620
7621 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7622 argument ARG. TYPE is the type of the return value. Return
7623 NULL_TREE if no simplification can be made. */
7624
7625 static tree
7626 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7627 bool hyper)
7628 {
7629 if (validate_arg (arg, COMPLEX_TYPE)
7630 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7631 {
7632 tree tmp;
7633
7634 /* Calculate the result when the argument is a constant. */
7635 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7636 return tmp;
7637
7638 /* Optimize fn(-x) into fn(x). */
7639 if ((tmp = fold_strip_sign_ops (arg)))
7640 return build_call_expr_loc (loc, fndecl, 1, tmp);
7641 }
7642
7643 return NULL_TREE;
7644 }
7645
7646 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7647 Return NULL_TREE if no simplification can be made. */
7648
7649 static tree
7650 fold_builtin_tan (tree arg, tree type)
7651 {
7652 enum built_in_function fcode;
7653 tree res;
7654
7655 if (!validate_arg (arg, REAL_TYPE))
7656 return NULL_TREE;
7657
7658 /* Calculate the result when the argument is a constant. */
7659 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7660 return res;
7661
7662 /* Optimize tan(atan(x)) = x. */
7663 fcode = builtin_mathfn_code (arg);
7664 if (flag_unsafe_math_optimizations
7665 && (fcode == BUILT_IN_ATAN
7666 || fcode == BUILT_IN_ATANF
7667 || fcode == BUILT_IN_ATANL))
7668 return CALL_EXPR_ARG (arg, 0);
7669
7670 return NULL_TREE;
7671 }
7672
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the angle;
   ARG1 and ARG2 are the sin and cos output pointers.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the cexpi call so its side effects happen only once even
     though its result is used twice below.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Build: (*arg1 = imagpart (cexpi (arg0))),
	    (*arg2 = realpart (cexpi (arg0))).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7712
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  /* The argument must be complex with a real element type.  */
  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the element (real) type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + yi) == cexpi(y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls since each result is used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result: complex (exp(r)*real(cexpi(i)), exp(r)*imag(cexpi(i))).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7780
7781 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7782 Return NULL_TREE if no simplification can be made. */
7783
7784 static tree
7785 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7786 {
7787 if (!validate_arg (arg, REAL_TYPE))
7788 return NULL_TREE;
7789
7790 /* Optimize trunc of constant value. */
7791 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7792 {
7793 REAL_VALUE_TYPE r, x;
7794 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7795
7796 x = TREE_REAL_CST (arg);
7797 real_trunc (&r, TYPE_MODE (type), &x);
7798 return build_real (type, r);
7799 }
7800
7801 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7802 }
7803
7804 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7805 Return NULL_TREE if no simplification can be made. */
7806
7807 static tree
7808 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7809 {
7810 if (!validate_arg (arg, REAL_TYPE))
7811 return NULL_TREE;
7812
7813 /* Optimize floor of constant value. */
7814 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7815 {
7816 REAL_VALUE_TYPE x;
7817
7818 x = TREE_REAL_CST (arg);
7819 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7820 {
7821 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7822 REAL_VALUE_TYPE r;
7823
7824 real_floor (&r, TYPE_MODE (type), &x);
7825 return build_real (type, r);
7826 }
7827 }
7828
7829 /* Fold floor (x) where x is nonnegative to trunc (x). */
7830 if (tree_expr_nonnegative_p (arg))
7831 {
7832 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7833 if (truncfn)
7834 return build_call_expr_loc (loc, truncfn, 1, arg);
7835 }
7836
7837 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7838 }
7839
7840 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7841 Return NULL_TREE if no simplification can be made. */
7842
7843 static tree
7844 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7845 {
7846 if (!validate_arg (arg, REAL_TYPE))
7847 return NULL_TREE;
7848
7849 /* Optimize ceil of constant value. */
7850 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7851 {
7852 REAL_VALUE_TYPE x;
7853
7854 x = TREE_REAL_CST (arg);
7855 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7856 {
7857 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7858 REAL_VALUE_TYPE r;
7859
7860 real_ceil (&r, TYPE_MODE (type), &x);
7861 return build_real (type, r);
7862 }
7863 }
7864
7865 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7866 }
7867
7868 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7869 Return NULL_TREE if no simplification can be made. */
7870
7871 static tree
7872 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7873 {
7874 if (!validate_arg (arg, REAL_TYPE))
7875 return NULL_TREE;
7876
7877 /* Optimize round of constant value. */
7878 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7879 {
7880 REAL_VALUE_TYPE x;
7881
7882 x = TREE_REAL_CST (arg);
7883 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7884 {
7885 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7886 REAL_VALUE_TYPE r;
7887
7888 real_round (&r, TYPE_MODE (type), &x);
7889 return build_real (type, r);
7890 }
7891 }
7892
7893 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7894 }
7895
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite constants can be converted to an integer.  */
      if (real_isfinite (&x))
	{
	  /* ITYPE is the integer result type, FTYPE the floating-point
	     argument type.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Round R according to which builtin family this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert the rounded value to a double_int; the cast assumes
	     val.low has the same representation as a HOST_WIDE_INT.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  /* Fold only when the value is representable in ITYPE.  */
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7963
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as a LO/HI pair of host words; WIDTH is
	 the precision of the argument's type in bits.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((unsigned HOST_WIDE_INT) (-1)
		    << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: one plus the index of the least significant set bit,
	     or zero when the argument is zero.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: number of leading zero bits.  For a zero argument the
	     target macro may set RESULT (it is passed by reference);
	     otherwise WIDTH is used.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: number of trailing zero bits; zero argument handled
	     as for clz via the target macro.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: number of leading redundant sign bits.  A negative
	     value is first complemented (within the type's precision)
	     so the count reduces to counting leading zeros.  */
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: clear the lowest set bit once per iteration.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8076
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is a LO/HI pair of host words; R_LO/R_HI
	 accumulate the byte-reversed result.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit offset S to the mirrored offset D,
	       reading from and writing to the appropriate host word.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Build the result, using both host words when the precision
	 does not fit in one.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8135
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function (mpfr_log,
   mpfr_log2 or mpfr_log10); it is used both for constant folding and
   to identify which logarithm builtin is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      /* FCODE identifies the builtin called in ARG, if any.  */
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8228
8229 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8230 NULL_TREE if no simplification can be made. */
8231
8232 static tree
8233 fold_builtin_hypot (location_t loc, tree fndecl,
8234 tree arg0, tree arg1, tree type)
8235 {
8236 tree res, narg0, narg1;
8237
8238 if (!validate_arg (arg0, REAL_TYPE)
8239 || !validate_arg (arg1, REAL_TYPE))
8240 return NULL_TREE;
8241
8242 /* Calculate the result when the argument is a constant. */
8243 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8244 return res;
8245
8246 /* If either argument to hypot has a negate or abs, strip that off.
8247 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8248 narg0 = fold_strip_sign_ops (arg0);
8249 narg1 = fold_strip_sign_ops (arg1);
8250 if (narg0 || narg1)
8251 {
8252 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8253 narg1 ? narg1 : arg1);
8254 }
8255
8256 /* If either argument is zero, hypot is fabs of the other. */
8257 if (real_zerop (arg0))
8258 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8259 else if (real_zerop (arg1))
8260 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8261
8262 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8263 if (flag_unsafe_math_optimizations
8264 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8265 {
8266 const REAL_VALUE_TYPE sqrt2_trunc
8267 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8268 return fold_build2_loc (loc, MULT_EXPR, type,
8269 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8270 build_real (type, sqrt2_trunc));
8271 }
8272
8273 return NULL_TREE;
8274 }
8275
8276
/* Fold a builtin function call to pow, powf, or powl.  ARG0 is the
   base, ARG1 the exponent and TYPE the call's result type.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: N is C truncated to an integer;
	 the round trip through real_from_integer detects whether C was
	 exactly integral.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact compile-time result is only acceptable under
		 -funsafe-math-optimizations.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8428
8429 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8430 Return NULL_TREE if no simplification can be made. */
8431 static tree
8432 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8433 tree arg0, tree arg1, tree type)
8434 {
8435 if (!validate_arg (arg0, REAL_TYPE)
8436 || !validate_arg (arg1, INTEGER_TYPE))
8437 return NULL_TREE;
8438
8439 /* Optimize pow(1.0,y) = 1.0. */
8440 if (real_onep (arg0))
8441 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8442
8443 if (host_integerp (arg1, 0))
8444 {
8445 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8446
8447 /* Evaluate powi at compile-time. */
8448 if (TREE_CODE (arg0) == REAL_CST
8449 && !TREE_OVERFLOW (arg0))
8450 {
8451 REAL_VALUE_TYPE x;
8452 x = TREE_REAL_CST (arg0);
8453 real_powi (&x, TYPE_MODE (type), &x, c);
8454 return build_real (type, x);
8455 }
8456
8457 /* Optimize pow(x,0) = 1.0. */
8458 if (c == 0)
8459 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8460 arg0);
8461
8462 /* Optimize pow(x,1) = x. */
8463 if (c == 1)
8464 return arg0;
8465
8466 /* Optimize pow(x,-1) = 1.0/x. */
8467 if (c == -1)
8468 return fold_build2_loc (loc, RDIV_EXPR, type,
8469 build_real (type, dconst1), arg0);
8470 }
8471
8472 return NULL_TREE;
8473 }
8474
8475 /* A subroutine of fold_builtin to fold the various exponent
8476 functions. Return NULL_TREE if no simplification can be made.
8477 FUNC is the corresponding MPFR exponent function. */
8478
8479 static tree
8480 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8481 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8482 {
8483 if (validate_arg (arg, REAL_TYPE))
8484 {
8485 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8486 tree res;
8487
8488 /* Calculate the result when the argument is a constant. */
8489 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8490 return res;
8491
8492 /* Optimize expN(logN(x)) = x. */
8493 if (flag_unsafe_math_optimizations)
8494 {
8495 const enum built_in_function fcode = builtin_mathfn_code (arg);
8496
8497 if ((func == mpfr_exp
8498 && (fcode == BUILT_IN_LOG
8499 || fcode == BUILT_IN_LOGF
8500 || fcode == BUILT_IN_LOGL))
8501 || (func == mpfr_exp2
8502 && (fcode == BUILT_IN_LOG2
8503 || fcode == BUILT_IN_LOG2F
8504 || fcode == BUILT_IN_LOG2L))
8505 || (func == mpfr_exp10
8506 && (fcode == BUILT_IN_LOG10
8507 || fcode == BUILT_IN_LOG10F
8508 || fcode == BUILT_IN_LOG10L)))
8509 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8510 }
8511 }
8512
8513 return NULL_TREE;
8514 }
8515
8516 /* Return true if VAR is a VAR_DECL or a component thereof. */
8517
8518 static bool
8519 var_decl_component_p (tree var)
8520 {
8521 tree inner = var;
8522 while (handled_component_p (inner))
8523 inner = TREE_OPERAND (inner, 0);
8524 return SSA_VAR_P (inner);
8525 }
8526
/* Fold function call to builtin memset.  DEST, C and LEN are the call
   arguments, TYPE the call's result type, and IGNORE is true when the
   call's value is unused.  The fold replaces a small, constant-size
   memset of a scalar object with a direct store.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* LEN must be a known nonnegative host integer.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* ETYPE is the type the replacement store will go through; for an
     array destination use the element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the destination exactly and DEST must be
     sufficiently aligned for it.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* Only handle the common 8-bit-byte host/target configuration.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word; the last
	 shift is split as (cval << 31) << 1 so no single shift equals
	 the word width on 32-bit-HWI hosts.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *)dest = cval, keeping DEST as the result value
     unless the caller ignores it.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8605
8606 /* Fold function call to builtin memset. Return
8607 NULL_TREE if no simplification can be made. */
8608
8609 static tree
8610 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8611 {
8612 if (! validate_arg (dest, POINTER_TYPE)
8613 || ! validate_arg (size, INTEGER_TYPE))
8614 return NULL_TREE;
8615
8616 if (!ignore)
8617 return NULL_TREE;
8618
8619 /* New argument list transforming bzero(ptr x, int y) to
8620 memset(ptr x, int 0, size_t y). This is done this way
8621 so that if it isn't expanded inline, we fallback to
8622 calling bzero instead of memset. */
8623
8624 return fold_builtin_memset (loc, dest, integer_zero_node,
8625 fold_convert_loc (loc, size_type_node, size),
8626 void_type_node, ignore);
8627 }
8628
8629 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8630 NULL_TREE if no simplification can be made.
8631 If ENDP is 0, return DEST (like memcpy).
8632 If ENDP is 1, return DEST+LEN (like mempcpy).
8633 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8634 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8635 (memmove). */
8636
8637 static tree
8638 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8639 tree len, tree type, bool ignore, int endp)
8640 {
8641 tree destvar, srcvar, expr;
8642
8643 if (! validate_arg (dest, POINTER_TYPE)
8644 || ! validate_arg (src, POINTER_TYPE)
8645 || ! validate_arg (len, INTEGER_TYPE))
8646 return NULL_TREE;
8647
8648 /* If the LEN parameter is zero, return DEST. */
8649 if (integer_zerop (len))
8650 return omit_one_operand_loc (loc, type, dest, src);
8651
8652 /* If SRC and DEST are the same (and not volatile), return
8653 DEST{,+LEN,+LEN-1}. */
8654 if (operand_equal_p (src, dest, 0))
8655 expr = len;
8656 else
8657 {
8658 tree srctype, desttype;
8659 unsigned int src_align, dest_align;
8660 tree off0;
8661
8662 if (endp == 3)
8663 {
8664 src_align = get_pointer_alignment (src);
8665 dest_align = get_pointer_alignment (dest);
8666
8667 /* Both DEST and SRC must be pointer types.
8668 ??? This is what old code did. Is the testing for pointer types
8669 really mandatory?
8670
8671 If either SRC is readonly or length is 1, we can use memcpy. */
8672 if (!dest_align || !src_align)
8673 return NULL_TREE;
8674 if (readonly_data_expr (src)
8675 || (host_integerp (len, 1)
8676 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8677 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8678 {
8679 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8680 if (!fn)
8681 return NULL_TREE;
8682 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8683 }
8684
8685 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8686 if (TREE_CODE (src) == ADDR_EXPR
8687 && TREE_CODE (dest) == ADDR_EXPR)
8688 {
8689 tree src_base, dest_base, fn;
8690 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8691 HOST_WIDE_INT size = -1;
8692 HOST_WIDE_INT maxsize = -1;
8693
8694 srcvar = TREE_OPERAND (src, 0);
8695 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8696 &size, &maxsize);
8697 destvar = TREE_OPERAND (dest, 0);
8698 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8699 &size, &maxsize);
8700 if (host_integerp (len, 1))
8701 maxsize = tree_low_cst (len, 1);
8702 else
8703 maxsize = -1;
8704 src_offset /= BITS_PER_UNIT;
8705 dest_offset /= BITS_PER_UNIT;
8706 if (SSA_VAR_P (src_base)
8707 && SSA_VAR_P (dest_base))
8708 {
8709 if (operand_equal_p (src_base, dest_base, 0)
8710 && ranges_overlap_p (src_offset, maxsize,
8711 dest_offset, maxsize))
8712 return NULL_TREE;
8713 }
8714 else if (TREE_CODE (src_base) == MEM_REF
8715 && TREE_CODE (dest_base) == MEM_REF)
8716 {
8717 double_int off;
8718 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8719 TREE_OPERAND (dest_base, 0), 0))
8720 return NULL_TREE;
8721 off = double_int_add (mem_ref_offset (src_base),
8722 shwi_to_double_int (src_offset));
8723 if (!double_int_fits_in_shwi_p (off))
8724 return NULL_TREE;
8725 src_offset = off.low;
8726 off = double_int_add (mem_ref_offset (dest_base),
8727 shwi_to_double_int (dest_offset));
8728 if (!double_int_fits_in_shwi_p (off))
8729 return NULL_TREE;
8730 dest_offset = off.low;
8731 if (ranges_overlap_p (src_offset, maxsize,
8732 dest_offset, maxsize))
8733 return NULL_TREE;
8734 }
8735 else
8736 return NULL_TREE;
8737
8738 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8739 if (!fn)
8740 return NULL_TREE;
8741 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8742 }
8743
8744 /* If the destination and source do not alias optimize into
8745 memcpy as well. */
8746 if ((is_gimple_min_invariant (dest)
8747 || TREE_CODE (dest) == SSA_NAME)
8748 && (is_gimple_min_invariant (src)
8749 || TREE_CODE (src) == SSA_NAME))
8750 {
8751 ao_ref destr, srcr;
8752 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8753 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8754 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8755 {
8756 tree fn;
8757 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8758 if (!fn)
8759 return NULL_TREE;
8760 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8761 }
8762 }
8763
8764 return NULL_TREE;
8765 }
8766
8767 if (!host_integerp (len, 0))
8768 return NULL_TREE;
8769 /* FIXME:
8770 This logic lose for arguments like (type *)malloc (sizeof (type)),
8771 since we strip the casts of up to VOID return value from malloc.
8772 Perhaps we ought to inherit type from non-VOID argument here? */
8773 STRIP_NOPS (src);
8774 STRIP_NOPS (dest);
8775 if (!POINTER_TYPE_P (TREE_TYPE (src))
8776 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8777 return NULL_TREE;
8778 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8779 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8780 {
8781 tree tem = TREE_OPERAND (src, 0);
8782 STRIP_NOPS (tem);
8783 if (tem != TREE_OPERAND (src, 0))
8784 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8785 }
8786 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8787 {
8788 tree tem = TREE_OPERAND (dest, 0);
8789 STRIP_NOPS (tem);
8790 if (tem != TREE_OPERAND (dest, 0))
8791 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8792 }
8793 srctype = TREE_TYPE (TREE_TYPE (src));
8794 if (TREE_CODE (srctype) == ARRAY_TYPE
8795 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8796 {
8797 srctype = TREE_TYPE (srctype);
8798 STRIP_NOPS (src);
8799 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8800 }
8801 desttype = TREE_TYPE (TREE_TYPE (dest));
8802 if (TREE_CODE (desttype) == ARRAY_TYPE
8803 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8804 {
8805 desttype = TREE_TYPE (desttype);
8806 STRIP_NOPS (dest);
8807 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8808 }
8809 if (TREE_ADDRESSABLE (srctype)
8810 || TREE_ADDRESSABLE (desttype))
8811 return NULL_TREE;
8812
8813 src_align = get_pointer_alignment (src);
8814 dest_align = get_pointer_alignment (dest);
8815 if (dest_align < TYPE_ALIGN (desttype)
8816 || src_align < TYPE_ALIGN (srctype))
8817 return NULL_TREE;
8818
8819 if (!ignore)
8820 dest = builtin_save_expr (dest);
8821
8822 /* Build accesses at offset zero with a ref-all character type. */
8823 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8824 ptr_mode, true), 0);
8825
8826 destvar = dest;
8827 STRIP_NOPS (destvar);
8828 if (TREE_CODE (destvar) == ADDR_EXPR
8829 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8830 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8831 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8832 else
8833 destvar = NULL_TREE;
8834
8835 srcvar = src;
8836 STRIP_NOPS (srcvar);
8837 if (TREE_CODE (srcvar) == ADDR_EXPR
8838 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8839 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8840 {
8841 if (!destvar
8842 || src_align >= TYPE_ALIGN (desttype))
8843 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8844 srcvar, off0);
8845 else if (!STRICT_ALIGNMENT)
8846 {
8847 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8848 src_align);
8849 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8850 }
8851 else
8852 srcvar = NULL_TREE;
8853 }
8854 else
8855 srcvar = NULL_TREE;
8856
8857 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8858 return NULL_TREE;
8859
8860 if (srcvar == NULL_TREE)
8861 {
8862 STRIP_NOPS (src);
8863 if (src_align >= TYPE_ALIGN (desttype))
8864 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8865 else
8866 {
8867 if (STRICT_ALIGNMENT)
8868 return NULL_TREE;
8869 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8870 src_align);
8871 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8872 }
8873 }
8874 else if (destvar == NULL_TREE)
8875 {
8876 STRIP_NOPS (dest);
8877 if (dest_align >= TYPE_ALIGN (srctype))
8878 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8879 else
8880 {
8881 if (STRICT_ALIGNMENT)
8882 return NULL_TREE;
8883 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8884 dest_align);
8885 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8886 }
8887 }
8888
8889 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8890 }
8891
8892 if (ignore)
8893 return expr;
8894
8895 if (endp == 0 || endp == 3)
8896 return omit_one_operand_loc (loc, type, dest, expr);
8897
8898 if (expr == len)
8899 expr = NULL_TREE;
8900
8901 if (endp == 2)
8902 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8903 ssize_int (1));
8904
8905 dest = fold_build_pointer_plus_loc (loc, dest, len);
8906 dest = fold_convert_loc (loc, type, dest);
8907 if (expr)
8908 dest = omit_one_operand_loc (loc, type, dest, expr);
8909 return dest;
8910 }
8911
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The transform below emits a memcpy call carrying an extra length
     argument, which is not a win when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* Without a caller-supplied LEN, try to compute the length of SRC at
     compile time; give up if it is unknown or has side effects.  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Copy LEN + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8948
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* The length of SRC must be a compile-time constant because the
     result of stpcpy is DEST + strlen (SRC), built below.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* The value of the call is DEST + LEN, sequenced after the memcpy.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
8989
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL in the source length.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  len = fold_convert_loc (loc, size_type_node, len);
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
9038
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* Both the searched-for byte and the length must be
	 compile-time constants.  */
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !host_integerp (len, 1))
	return NULL_TREE;

      /* Only fold when ARG1 is a string constant and LEN does not
	 reach past its terminating NUL.  */
      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the tree constant to a host char; fail if it does
	     not fit.  */
	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_low_cst (len, 1));

	  /* Not found: the result is a null pointer.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  /* Found: the result is ARG1 plus the offset of the match.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
9079
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes compared.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares bytes as unsigned char, hence the const
	 unsigned char type for both dereferences.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9146
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, normalizing the
     host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
9209
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Everything constant: evaluate at compile time, normalizing the
     host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Bytes compare as unsigned char, hence the const unsigned char
	 type used for both dereferences.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9304
9305 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9306 ARG. Return NULL_TREE if no simplification can be made. */
9307
9308 static tree
9309 fold_builtin_signbit (location_t loc, tree arg, tree type)
9310 {
9311 if (!validate_arg (arg, REAL_TYPE))
9312 return NULL_TREE;
9313
9314 /* If ARG is a compile-time constant, determine the result. */
9315 if (TREE_CODE (arg) == REAL_CST
9316 && !TREE_OVERFLOW (arg))
9317 {
9318 REAL_VALUE_TYPE c;
9319
9320 c = TREE_REAL_CST (arg);
9321 return (REAL_VALUE_NEGATIVE (c)
9322 ? build_one_cst (type)
9323 : build_zero_cst (type));
9324 }
9325
9326 /* If ARG is non-negative, the result is always zero. */
9327 if (tree_expr_nonnegative_p (arg))
9328 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9329
9330 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9331 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9332 return fold_convert (type,
9333 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9334 build_real (TREE_TYPE (arg), dconst0)));
9335
9336 return NULL_TREE;
9337 }
9338
9339 /* Fold function call to builtin copysign, copysignf or copysignl with
9340 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9341 be made. */
9342
9343 static tree
9344 fold_builtin_copysign (location_t loc, tree fndecl,
9345 tree arg1, tree arg2, tree type)
9346 {
9347 tree tem;
9348
9349 if (!validate_arg (arg1, REAL_TYPE)
9350 || !validate_arg (arg2, REAL_TYPE))
9351 return NULL_TREE;
9352
9353 /* copysign(X,X) is X. */
9354 if (operand_equal_p (arg1, arg2, 0))
9355 return fold_convert_loc (loc, type, arg1);
9356
9357 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9358 if (TREE_CODE (arg1) == REAL_CST
9359 && TREE_CODE (arg2) == REAL_CST
9360 && !TREE_OVERFLOW (arg1)
9361 && !TREE_OVERFLOW (arg2))
9362 {
9363 REAL_VALUE_TYPE c1, c2;
9364
9365 c1 = TREE_REAL_CST (arg1);
9366 c2 = TREE_REAL_CST (arg2);
9367 /* c1.sign := c2.sign. */
9368 real_copysign (&c1, &c2);
9369 return build_real (type, c1);
9370 }
9371
9372 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9373 Remember to evaluate Y for side-effects. */
9374 if (tree_expr_nonnegative_p (arg2))
9375 return omit_one_operand_loc (loc, type,
9376 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9377 arg2);
9378
9379 /* Strip sign changing operations for the first argument. */
9380 tem = fold_strip_sign_ops (arg1);
9381 if (tem)
9382 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9383
9384 return NULL_TREE;
9385 }
9386
9387 /* Fold a call to builtin isascii with argument ARG. */
9388
9389 static tree
9390 fold_builtin_isascii (location_t loc, tree arg)
9391 {
9392 if (!validate_arg (arg, INTEGER_TYPE))
9393 return NULL_TREE;
9394 else
9395 {
9396 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9397 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9398 build_int_cst (integer_type_node,
9399 ~ (unsigned HOST_WIDE_INT) 0x7f));
9400 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9401 arg, integer_zero_node);
9402 }
9403 }
9404
9405 /* Fold a call to builtin toascii with argument ARG. */
9406
9407 static tree
9408 fold_builtin_toascii (location_t loc, tree arg)
9409 {
9410 if (!validate_arg (arg, INTEGER_TYPE))
9411 return NULL_TREE;
9412
9413 /* Transform toascii(c) -> (c & 0x7f). */
9414 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9415 build_int_cst (integer_type_node, 0x7f));
9416 }
9417
9418 /* Fold a call to builtin isdigit with argument ARG. */
9419
9420 static tree
9421 fold_builtin_isdigit (location_t loc, tree arg)
9422 {
9423 if (!validate_arg (arg, INTEGER_TYPE))
9424 return NULL_TREE;
9425 else
9426 {
9427 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9428 /* According to the C standard, isdigit is unaffected by locale.
9429 However, it definitely is affected by the target character set. */
9430 unsigned HOST_WIDE_INT target_digit0
9431 = lang_hooks.to_target_charset ('0');
9432
9433 if (target_digit0 == 0)
9434 return NULL_TREE;
9435
9436 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9437 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9438 build_int_cst (unsigned_type_node, target_digit0));
9439 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9440 build_int_cst (unsigned_type_node, 9));
9441 }
9442 }
9443
9444 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9445
9446 static tree
9447 fold_builtin_fabs (location_t loc, tree arg, tree type)
9448 {
9449 if (!validate_arg (arg, REAL_TYPE))
9450 return NULL_TREE;
9451
9452 arg = fold_convert_loc (loc, type, arg);
9453 if (TREE_CODE (arg) == REAL_CST)
9454 return fold_abs_const (arg, type);
9455 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9456 }
9457
9458 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9459
9460 static tree
9461 fold_builtin_abs (location_t loc, tree arg, tree type)
9462 {
9463 if (!validate_arg (arg, INTEGER_TYPE))
9464 return NULL_TREE;
9465
9466 arg = fold_convert_loc (loc, type, arg);
9467 if (TREE_CODE (arg) == INTEGER_CST)
9468 return fold_abs_const (arg, type);
9469 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9470 }
9471
9472 /* Fold a fma operation with arguments ARG[012]. */
9473
9474 tree
9475 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9476 tree type, tree arg0, tree arg1, tree arg2)
9477 {
9478 if (TREE_CODE (arg0) == REAL_CST
9479 && TREE_CODE (arg1) == REAL_CST
9480 && TREE_CODE (arg2) == REAL_CST)
9481 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9482
9483 return NULL_TREE;
9484 }
9485
9486 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9487
9488 static tree
9489 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9490 {
9491 if (validate_arg (arg0, REAL_TYPE)
9492 && validate_arg(arg1, REAL_TYPE)
9493 && validate_arg(arg2, REAL_TYPE))
9494 {
9495 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9496 if (tem)
9497 return tem;
9498
9499 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9500 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9501 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9502 }
9503 return NULL_TREE;
9504 }
9505
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the call's
   arguments, TYPE its return type, and MAX selects fmax (true) or
   fmin (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9550
9551 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9552
9553 static tree
9554 fold_builtin_carg (location_t loc, tree arg, tree type)
9555 {
9556 if (validate_arg (arg, COMPLEX_TYPE)
9557 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9558 {
9559 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9560
9561 if (atan2_fn)
9562 {
9563 tree new_arg = builtin_save_expr (arg);
9564 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9565 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9566 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9567 }
9568 }
9569
9570 return NULL_TREE;
9571 }
9572
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the function's return type (real for logb, integer for ilogb).
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold a constant, non-overflowing argument.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9614
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the function's return type.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only fold a constant, non-overflowing argument.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9653
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 a pointer receiving the exponent, and RETTYPE the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant, non-overflowing first argument.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9709
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  ARG0 is the
   value, ARG1 the exponent adjustment, and TYPE the return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9770
9771 /* Fold a call to builtin modf. */
9772
9773 static tree
9774 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9775 {
9776 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9777 return NULL_TREE;
9778
9779 STRIP_NOPS (arg0);
9780
9781 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9782 return NULL_TREE;
9783
9784 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9785
9786 /* Proceed if a valid pointer type was passed in. */
9787 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9788 {
9789 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9790 REAL_VALUE_TYPE trunc, frac;
9791
9792 switch (value->cl)
9793 {
9794 case rvc_nan:
9795 case rvc_zero:
9796 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9797 trunc = frac = *value;
9798 break;
9799 case rvc_inf:
9800 /* For +-Inf, return (*arg1 = arg0, +-0). */
9801 frac = dconst0;
9802 frac.sign = value->sign;
9803 trunc = *value;
9804 break;
9805 case rvc_normal:
9806 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9807 real_trunc (&trunc, VOIDmode, value);
9808 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9809 /* If the original number was negative and already
9810 integral, then the fractional part is -0.0. */
9811 if (value->sign && frac.cl == rvc_zero)
9812 frac.sign = value->sign;
9813 break;
9814 }
9815
9816 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9817 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9818 build_real (rettype, trunc));
9819 TREE_SIDE_EFFECTS (arg1) = 1;
9820 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9821 build_real (rettype, frac));
9822 }
9823
9824 return NULL_TREE;
9825 }
9826
9827 /* Given a location LOC, an interclass builtin function decl FNDECL
9828 and its single argument ARG, return an folded expression computing
9829 the same, or NULL_TREE if we either couldn't or didn't want to fold
9830 (the latter happen if there's an RTL instruction available). */
9831
9832 static tree
9833 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9834 {
9835 enum machine_mode mode;
9836
9837 if (!validate_arg (arg, REAL_TYPE))
9838 return NULL_TREE;
9839
9840 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9841 return NULL_TREE;
9842
9843 mode = TYPE_MODE (TREE_TYPE (arg));
9844
9845 /* If there is no optab, try generic code. */
9846 switch (DECL_FUNCTION_CODE (fndecl))
9847 {
9848 tree result;
9849
9850 CASE_FLT_FN (BUILT_IN_ISINF):
9851 {
9852 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9853 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9854 tree const type = TREE_TYPE (arg);
9855 REAL_VALUE_TYPE r;
9856 char buf[128];
9857
9858 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9859 real_from_string (&r, buf);
9860 result = build_call_expr (isgr_fn, 2,
9861 fold_build1_loc (loc, ABS_EXPR, type, arg),
9862 build_real (type, r));
9863 return result;
9864 }
9865 CASE_FLT_FN (BUILT_IN_FINITE):
9866 case BUILT_IN_ISFINITE:
9867 {
9868 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9869 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9870 tree const type = TREE_TYPE (arg);
9871 REAL_VALUE_TYPE r;
9872 char buf[128];
9873
9874 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9875 real_from_string (&r, buf);
9876 result = build_call_expr (isle_fn, 2,
9877 fold_build1_loc (loc, ABS_EXPR, type, arg),
9878 build_real (type, r));
9879 /*result = fold_build2_loc (loc, UNGT_EXPR,
9880 TREE_TYPE (TREE_TYPE (fndecl)),
9881 fold_build1_loc (loc, ABS_EXPR, type, arg),
9882 build_real (type, r));
9883 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9884 TREE_TYPE (TREE_TYPE (fndecl)),
9885 result);*/
9886 return result;
9887 }
9888 case BUILT_IN_ISNORMAL:
9889 {
9890 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9891 islessequal(fabs(x),DBL_MAX). */
9892 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9893 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9894 tree const type = TREE_TYPE (arg);
9895 REAL_VALUE_TYPE rmax, rmin;
9896 char buf[128];
9897
9898 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9899 real_from_string (&rmax, buf);
9900 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9901 real_from_string (&rmin, buf);
9902 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9903 result = build_call_expr (isle_fn, 2, arg,
9904 build_real (type, rmax));
9905 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9906 build_call_expr (isge_fn, 2, arg,
9907 build_real (type, rmin)));
9908 return result;
9909 }
9910 default:
9911 break;
9912 }
9913
9914 return NULL_TREE;
9915 }
9916
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects the
   classification being folded (BUILT_IN_ISINF, BUILT_IN_ISINF_SIGN,
   BUILT_IN_ISFINITE or BUILT_IN_ISNAN); FNDECL supplies the call's
   return type.  Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* When the mode has no infinities the answer is statically 0;
	 keep ARG for its side effects only.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  /* Constant-fold: +Inf -> 1, -Inf -> -1, anything else -> 0.  */
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is used by both calls; evaluate it only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both call results to 0/1 truth values.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	/* NULL_TREE if either builtin decl was unavailable.  */
	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and without infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: ARG unordered with itself is true exactly for
	 NaN, so emit (arg UNORDERED arg).  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10009
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* fabs(arg) is tested several times below; wrap it in a SAVE_EXPR
     so it is evaluated only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Build the nested COND_EXPR chain from the innermost test outward,
     starting with the zero/subnormal distinction.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is DBL_MIN (and friends) for MODE, per the
     "fabs(x) >= DBL_MIN" test in the expansion above.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only wrap the infinity test around the chain when the mode can
     actually represent infinities.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  /* Likewise the NaN test; ORDERED is true when ARG is not a NaN.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10077
10078 /* Fold a call to an unordered comparison function such as
10079 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10080 being called and ARG0 and ARG1 are the arguments for the call.
10081 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10082 the opposite of the desired result. UNORDERED_CODE is used
10083 for modes that can hold NaNs and ORDERED_CODE is used for
10084 the rest. */
10085
10086 static tree
10087 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10088 enum tree_code unordered_code,
10089 enum tree_code ordered_code)
10090 {
10091 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10092 enum tree_code code;
10093 tree type0, type1;
10094 enum tree_code code0, code1;
10095 tree cmp_type = NULL_TREE;
10096
10097 type0 = TREE_TYPE (arg0);
10098 type1 = TREE_TYPE (arg1);
10099
10100 code0 = TREE_CODE (type0);
10101 code1 = TREE_CODE (type1);
10102
10103 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10104 /* Choose the wider of two real types. */
10105 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10106 ? type0 : type1;
10107 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10108 cmp_type = type0;
10109 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10110 cmp_type = type1;
10111
10112 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10113 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10114
10115 if (unordered_code == UNORDERED_EXPR)
10116 {
10117 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10118 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10119 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10120 }
10121
10122 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10123 : ordered_code;
10124 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10125 fold_build2_loc (loc, code, type, arg0, arg1));
10126 }
10127
10128 /* Fold a call to built-in function FNDECL with 0 arguments.
10129 IGNORE is true if the result of the function call is ignored. This
10130 function returns NULL_TREE if no simplification was possible. */
10131
10132 static tree
10133 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10134 {
10135 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10136 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10137 switch (fcode)
10138 {
10139 CASE_FLT_FN (BUILT_IN_INF):
10140 case BUILT_IN_INFD32:
10141 case BUILT_IN_INFD64:
10142 case BUILT_IN_INFD128:
10143 return fold_builtin_inf (loc, type, true);
10144
10145 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10146 return fold_builtin_inf (loc, type, false);
10147
10148 case BUILT_IN_CLASSIFY_TYPE:
10149 return fold_builtin_classify_type (NULL_TREE);
10150
10151 default:
10152 break;
10153 }
10154 return NULL_TREE;
10155 }
10156
10157 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10158 IGNORE is true if the result of the function call is ignored. This
10159 function returns NULL_TREE if no simplification was possible. */
10160
10161 static tree
10162 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10163 {
10164 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10165 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10166 switch (fcode)
10167 {
10168 case BUILT_IN_CONSTANT_P:
10169 {
10170 tree val = fold_builtin_constant_p (arg0);
10171
10172 /* Gimplification will pull the CALL_EXPR for the builtin out of
10173 an if condition. When not optimizing, we'll not CSE it back.
10174 To avoid link error types of regressions, return false now. */
10175 if (!val && !optimize)
10176 val = integer_zero_node;
10177
10178 return val;
10179 }
10180
10181 case BUILT_IN_CLASSIFY_TYPE:
10182 return fold_builtin_classify_type (arg0);
10183
10184 case BUILT_IN_STRLEN:
10185 return fold_builtin_strlen (loc, type, arg0);
10186
10187 CASE_FLT_FN (BUILT_IN_FABS):
10188 return fold_builtin_fabs (loc, arg0, type);
10189
10190 case BUILT_IN_ABS:
10191 case BUILT_IN_LABS:
10192 case BUILT_IN_LLABS:
10193 case BUILT_IN_IMAXABS:
10194 return fold_builtin_abs (loc, arg0, type);
10195
10196 CASE_FLT_FN (BUILT_IN_CONJ):
10197 if (validate_arg (arg0, COMPLEX_TYPE)
10198 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10199 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10200 break;
10201
10202 CASE_FLT_FN (BUILT_IN_CREAL):
10203 if (validate_arg (arg0, COMPLEX_TYPE)
10204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10205 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10206 break;
10207
10208 CASE_FLT_FN (BUILT_IN_CIMAG):
10209 if (validate_arg (arg0, COMPLEX_TYPE)
10210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10211 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10212 break;
10213
10214 CASE_FLT_FN (BUILT_IN_CCOS):
10215 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10216
10217 CASE_FLT_FN (BUILT_IN_CCOSH):
10218 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10219
10220 CASE_FLT_FN (BUILT_IN_CPROJ):
10221 return fold_builtin_cproj(loc, arg0, type);
10222
10223 CASE_FLT_FN (BUILT_IN_CSIN):
10224 if (validate_arg (arg0, COMPLEX_TYPE)
10225 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10226 return do_mpc_arg1 (arg0, type, mpc_sin);
10227 break;
10228
10229 CASE_FLT_FN (BUILT_IN_CSINH):
10230 if (validate_arg (arg0, COMPLEX_TYPE)
10231 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10232 return do_mpc_arg1 (arg0, type, mpc_sinh);
10233 break;
10234
10235 CASE_FLT_FN (BUILT_IN_CTAN):
10236 if (validate_arg (arg0, COMPLEX_TYPE)
10237 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10238 return do_mpc_arg1 (arg0, type, mpc_tan);
10239 break;
10240
10241 CASE_FLT_FN (BUILT_IN_CTANH):
10242 if (validate_arg (arg0, COMPLEX_TYPE)
10243 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10244 return do_mpc_arg1 (arg0, type, mpc_tanh);
10245 break;
10246
10247 CASE_FLT_FN (BUILT_IN_CLOG):
10248 if (validate_arg (arg0, COMPLEX_TYPE)
10249 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10250 return do_mpc_arg1 (arg0, type, mpc_log);
10251 break;
10252
10253 CASE_FLT_FN (BUILT_IN_CSQRT):
10254 if (validate_arg (arg0, COMPLEX_TYPE)
10255 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10256 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10257 break;
10258
10259 CASE_FLT_FN (BUILT_IN_CASIN):
10260 if (validate_arg (arg0, COMPLEX_TYPE)
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10262 return do_mpc_arg1 (arg0, type, mpc_asin);
10263 break;
10264
10265 CASE_FLT_FN (BUILT_IN_CACOS):
10266 if (validate_arg (arg0, COMPLEX_TYPE)
10267 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10268 return do_mpc_arg1 (arg0, type, mpc_acos);
10269 break;
10270
10271 CASE_FLT_FN (BUILT_IN_CATAN):
10272 if (validate_arg (arg0, COMPLEX_TYPE)
10273 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10274 return do_mpc_arg1 (arg0, type, mpc_atan);
10275 break;
10276
10277 CASE_FLT_FN (BUILT_IN_CASINH):
10278 if (validate_arg (arg0, COMPLEX_TYPE)
10279 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10280 return do_mpc_arg1 (arg0, type, mpc_asinh);
10281 break;
10282
10283 CASE_FLT_FN (BUILT_IN_CACOSH):
10284 if (validate_arg (arg0, COMPLEX_TYPE)
10285 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10286 return do_mpc_arg1 (arg0, type, mpc_acosh);
10287 break;
10288
10289 CASE_FLT_FN (BUILT_IN_CATANH):
10290 if (validate_arg (arg0, COMPLEX_TYPE)
10291 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10292 return do_mpc_arg1 (arg0, type, mpc_atanh);
10293 break;
10294
10295 CASE_FLT_FN (BUILT_IN_CABS):
10296 return fold_builtin_cabs (loc, arg0, type, fndecl);
10297
10298 CASE_FLT_FN (BUILT_IN_CARG):
10299 return fold_builtin_carg (loc, arg0, type);
10300
10301 CASE_FLT_FN (BUILT_IN_SQRT):
10302 return fold_builtin_sqrt (loc, arg0, type);
10303
10304 CASE_FLT_FN (BUILT_IN_CBRT):
10305 return fold_builtin_cbrt (loc, arg0, type);
10306
10307 CASE_FLT_FN (BUILT_IN_ASIN):
10308 if (validate_arg (arg0, REAL_TYPE))
10309 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10310 &dconstm1, &dconst1, true);
10311 break;
10312
10313 CASE_FLT_FN (BUILT_IN_ACOS):
10314 if (validate_arg (arg0, REAL_TYPE))
10315 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10316 &dconstm1, &dconst1, true);
10317 break;
10318
10319 CASE_FLT_FN (BUILT_IN_ATAN):
10320 if (validate_arg (arg0, REAL_TYPE))
10321 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10322 break;
10323
10324 CASE_FLT_FN (BUILT_IN_ASINH):
10325 if (validate_arg (arg0, REAL_TYPE))
10326 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10327 break;
10328
10329 CASE_FLT_FN (BUILT_IN_ACOSH):
10330 if (validate_arg (arg0, REAL_TYPE))
10331 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10332 &dconst1, NULL, true);
10333 break;
10334
10335 CASE_FLT_FN (BUILT_IN_ATANH):
10336 if (validate_arg (arg0, REAL_TYPE))
10337 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10338 &dconstm1, &dconst1, false);
10339 break;
10340
10341 CASE_FLT_FN (BUILT_IN_SIN):
10342 if (validate_arg (arg0, REAL_TYPE))
10343 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10344 break;
10345
10346 CASE_FLT_FN (BUILT_IN_COS):
10347 return fold_builtin_cos (loc, arg0, type, fndecl);
10348
10349 CASE_FLT_FN (BUILT_IN_TAN):
10350 return fold_builtin_tan (arg0, type);
10351
10352 CASE_FLT_FN (BUILT_IN_CEXP):
10353 return fold_builtin_cexp (loc, arg0, type);
10354
10355 CASE_FLT_FN (BUILT_IN_CEXPI):
10356 if (validate_arg (arg0, REAL_TYPE))
10357 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10358 break;
10359
10360 CASE_FLT_FN (BUILT_IN_SINH):
10361 if (validate_arg (arg0, REAL_TYPE))
10362 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10363 break;
10364
10365 CASE_FLT_FN (BUILT_IN_COSH):
10366 return fold_builtin_cosh (loc, arg0, type, fndecl);
10367
10368 CASE_FLT_FN (BUILT_IN_TANH):
10369 if (validate_arg (arg0, REAL_TYPE))
10370 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_ERF):
10374 if (validate_arg (arg0, REAL_TYPE))
10375 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10376 break;
10377
10378 CASE_FLT_FN (BUILT_IN_ERFC):
10379 if (validate_arg (arg0, REAL_TYPE))
10380 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10381 break;
10382
10383 CASE_FLT_FN (BUILT_IN_TGAMMA):
10384 if (validate_arg (arg0, REAL_TYPE))
10385 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10386 break;
10387
10388 CASE_FLT_FN (BUILT_IN_EXP):
10389 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10390
10391 CASE_FLT_FN (BUILT_IN_EXP2):
10392 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10393
10394 CASE_FLT_FN (BUILT_IN_EXP10):
10395 CASE_FLT_FN (BUILT_IN_POW10):
10396 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10397
10398 CASE_FLT_FN (BUILT_IN_EXPM1):
10399 if (validate_arg (arg0, REAL_TYPE))
10400 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10401 break;
10402
10403 CASE_FLT_FN (BUILT_IN_LOG):
10404 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10405
10406 CASE_FLT_FN (BUILT_IN_LOG2):
10407 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10408
10409 CASE_FLT_FN (BUILT_IN_LOG10):
10410 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10411
10412 CASE_FLT_FN (BUILT_IN_LOG1P):
10413 if (validate_arg (arg0, REAL_TYPE))
10414 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10415 &dconstm1, NULL, false);
10416 break;
10417
10418 CASE_FLT_FN (BUILT_IN_J0):
10419 if (validate_arg (arg0, REAL_TYPE))
10420 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10421 NULL, NULL, 0);
10422 break;
10423
10424 CASE_FLT_FN (BUILT_IN_J1):
10425 if (validate_arg (arg0, REAL_TYPE))
10426 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10427 NULL, NULL, 0);
10428 break;
10429
10430 CASE_FLT_FN (BUILT_IN_Y0):
10431 if (validate_arg (arg0, REAL_TYPE))
10432 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10433 &dconst0, NULL, false);
10434 break;
10435
10436 CASE_FLT_FN (BUILT_IN_Y1):
10437 if (validate_arg (arg0, REAL_TYPE))
10438 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10439 &dconst0, NULL, false);
10440 break;
10441
10442 CASE_FLT_FN (BUILT_IN_NAN):
10443 case BUILT_IN_NAND32:
10444 case BUILT_IN_NAND64:
10445 case BUILT_IN_NAND128:
10446 return fold_builtin_nan (arg0, type, true);
10447
10448 CASE_FLT_FN (BUILT_IN_NANS):
10449 return fold_builtin_nan (arg0, type, false);
10450
10451 CASE_FLT_FN (BUILT_IN_FLOOR):
10452 return fold_builtin_floor (loc, fndecl, arg0);
10453
10454 CASE_FLT_FN (BUILT_IN_CEIL):
10455 return fold_builtin_ceil (loc, fndecl, arg0);
10456
10457 CASE_FLT_FN (BUILT_IN_TRUNC):
10458 return fold_builtin_trunc (loc, fndecl, arg0);
10459
10460 CASE_FLT_FN (BUILT_IN_ROUND):
10461 return fold_builtin_round (loc, fndecl, arg0);
10462
10463 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10464 CASE_FLT_FN (BUILT_IN_RINT):
10465 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10466
10467 CASE_FLT_FN (BUILT_IN_ICEIL):
10468 CASE_FLT_FN (BUILT_IN_LCEIL):
10469 CASE_FLT_FN (BUILT_IN_LLCEIL):
10470 CASE_FLT_FN (BUILT_IN_LFLOOR):
10471 CASE_FLT_FN (BUILT_IN_IFLOOR):
10472 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10473 CASE_FLT_FN (BUILT_IN_IROUND):
10474 CASE_FLT_FN (BUILT_IN_LROUND):
10475 CASE_FLT_FN (BUILT_IN_LLROUND):
10476 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10477
10478 CASE_FLT_FN (BUILT_IN_IRINT):
10479 CASE_FLT_FN (BUILT_IN_LRINT):
10480 CASE_FLT_FN (BUILT_IN_LLRINT):
10481 return fold_fixed_mathfn (loc, fndecl, arg0);
10482
10483 case BUILT_IN_BSWAP32:
10484 case BUILT_IN_BSWAP64:
10485 return fold_builtin_bswap (fndecl, arg0);
10486
10487 CASE_INT_FN (BUILT_IN_FFS):
10488 CASE_INT_FN (BUILT_IN_CLZ):
10489 CASE_INT_FN (BUILT_IN_CTZ):
10490 CASE_INT_FN (BUILT_IN_CLRSB):
10491 CASE_INT_FN (BUILT_IN_POPCOUNT):
10492 CASE_INT_FN (BUILT_IN_PARITY):
10493 return fold_builtin_bitop (fndecl, arg0);
10494
10495 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10496 return fold_builtin_signbit (loc, arg0, type);
10497
10498 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10499 return fold_builtin_significand (loc, arg0, type);
10500
10501 CASE_FLT_FN (BUILT_IN_ILOGB):
10502 CASE_FLT_FN (BUILT_IN_LOGB):
10503 return fold_builtin_logb (loc, arg0, type);
10504
10505 case BUILT_IN_ISASCII:
10506 return fold_builtin_isascii (loc, arg0);
10507
10508 case BUILT_IN_TOASCII:
10509 return fold_builtin_toascii (loc, arg0);
10510
10511 case BUILT_IN_ISDIGIT:
10512 return fold_builtin_isdigit (loc, arg0);
10513
10514 CASE_FLT_FN (BUILT_IN_FINITE):
10515 case BUILT_IN_FINITED32:
10516 case BUILT_IN_FINITED64:
10517 case BUILT_IN_FINITED128:
10518 case BUILT_IN_ISFINITE:
10519 {
10520 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10521 if (ret)
10522 return ret;
10523 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10524 }
10525
10526 CASE_FLT_FN (BUILT_IN_ISINF):
10527 case BUILT_IN_ISINFD32:
10528 case BUILT_IN_ISINFD64:
10529 case BUILT_IN_ISINFD128:
10530 {
10531 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10532 if (ret)
10533 return ret;
10534 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10535 }
10536
10537 case BUILT_IN_ISNORMAL:
10538 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10539
10540 case BUILT_IN_ISINF_SIGN:
10541 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10542
10543 CASE_FLT_FN (BUILT_IN_ISNAN):
10544 case BUILT_IN_ISNAND32:
10545 case BUILT_IN_ISNAND64:
10546 case BUILT_IN_ISNAND128:
10547 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10548
10549 case BUILT_IN_PRINTF:
10550 case BUILT_IN_PRINTF_UNLOCKED:
10551 case BUILT_IN_VPRINTF:
10552 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10553
10554 case BUILT_IN_FREE:
10555 if (integer_zerop (arg0))
10556 return build_empty_stmt (loc);
10557 break;
10558
10559 default:
10560 break;
10561 }
10562
10563 return NULL_TREE;
10564
10565 }
10566
10567 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10568 IGNORE is true if the result of the function call is ignored. This
10569 function returns NULL_TREE if no simplification was possible. */
10570
10571 static tree
10572 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10573 {
10574 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10575 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10576
10577 switch (fcode)
10578 {
10579 CASE_FLT_FN (BUILT_IN_JN):
10580 if (validate_arg (arg0, INTEGER_TYPE)
10581 && validate_arg (arg1, REAL_TYPE))
10582 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10583 break;
10584
10585 CASE_FLT_FN (BUILT_IN_YN):
10586 if (validate_arg (arg0, INTEGER_TYPE)
10587 && validate_arg (arg1, REAL_TYPE))
10588 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10589 &dconst0, false);
10590 break;
10591
10592 CASE_FLT_FN (BUILT_IN_DREM):
10593 CASE_FLT_FN (BUILT_IN_REMAINDER):
10594 if (validate_arg (arg0, REAL_TYPE)
10595 && validate_arg(arg1, REAL_TYPE))
10596 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10597 break;
10598
10599 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10600 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10601 if (validate_arg (arg0, REAL_TYPE)
10602 && validate_arg(arg1, POINTER_TYPE))
10603 return do_mpfr_lgamma_r (arg0, arg1, type);
10604 break;
10605
10606 CASE_FLT_FN (BUILT_IN_ATAN2):
10607 if (validate_arg (arg0, REAL_TYPE)
10608 && validate_arg(arg1, REAL_TYPE))
10609 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10610 break;
10611
10612 CASE_FLT_FN (BUILT_IN_FDIM):
10613 if (validate_arg (arg0, REAL_TYPE)
10614 && validate_arg(arg1, REAL_TYPE))
10615 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10616 break;
10617
10618 CASE_FLT_FN (BUILT_IN_HYPOT):
10619 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10620
10621 CASE_FLT_FN (BUILT_IN_CPOW):
10622 if (validate_arg (arg0, COMPLEX_TYPE)
10623 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10624 && validate_arg (arg1, COMPLEX_TYPE)
10625 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10626 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10627 break;
10628
10629 CASE_FLT_FN (BUILT_IN_LDEXP):
10630 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10631 CASE_FLT_FN (BUILT_IN_SCALBN):
10632 CASE_FLT_FN (BUILT_IN_SCALBLN):
10633 return fold_builtin_load_exponent (loc, arg0, arg1,
10634 type, /*ldexp=*/false);
10635
10636 CASE_FLT_FN (BUILT_IN_FREXP):
10637 return fold_builtin_frexp (loc, arg0, arg1, type);
10638
10639 CASE_FLT_FN (BUILT_IN_MODF):
10640 return fold_builtin_modf (loc, arg0, arg1, type);
10641
10642 case BUILT_IN_BZERO:
10643 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10644
10645 case BUILT_IN_FPUTS:
10646 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10647
10648 case BUILT_IN_FPUTS_UNLOCKED:
10649 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10650
10651 case BUILT_IN_STRSTR:
10652 return fold_builtin_strstr (loc, arg0, arg1, type);
10653
10654 case BUILT_IN_STRCAT:
10655 return fold_builtin_strcat (loc, arg0, arg1);
10656
10657 case BUILT_IN_STRSPN:
10658 return fold_builtin_strspn (loc, arg0, arg1);
10659
10660 case BUILT_IN_STRCSPN:
10661 return fold_builtin_strcspn (loc, arg0, arg1);
10662
10663 case BUILT_IN_STRCHR:
10664 case BUILT_IN_INDEX:
10665 return fold_builtin_strchr (loc, arg0, arg1, type);
10666
10667 case BUILT_IN_STRRCHR:
10668 case BUILT_IN_RINDEX:
10669 return fold_builtin_strrchr (loc, arg0, arg1, type);
10670
10671 case BUILT_IN_STRCPY:
10672 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10673
10674 case BUILT_IN_STPCPY:
10675 if (ignore)
10676 {
10677 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10678 if (!fn)
10679 break;
10680
10681 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10682 }
10683 else
10684 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10685 break;
10686
10687 case BUILT_IN_STRCMP:
10688 return fold_builtin_strcmp (loc, arg0, arg1);
10689
10690 case BUILT_IN_STRPBRK:
10691 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10692
10693 case BUILT_IN_EXPECT:
10694 return fold_builtin_expect (loc, arg0, arg1);
10695
10696 CASE_FLT_FN (BUILT_IN_POW):
10697 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10698
10699 CASE_FLT_FN (BUILT_IN_POWI):
10700 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10701
10702 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10703 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10704
10705 CASE_FLT_FN (BUILT_IN_FMIN):
10706 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10707
10708 CASE_FLT_FN (BUILT_IN_FMAX):
10709 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10710
10711 case BUILT_IN_ISGREATER:
10712 return fold_builtin_unordered_cmp (loc, fndecl,
10713 arg0, arg1, UNLE_EXPR, LE_EXPR);
10714 case BUILT_IN_ISGREATEREQUAL:
10715 return fold_builtin_unordered_cmp (loc, fndecl,
10716 arg0, arg1, UNLT_EXPR, LT_EXPR);
10717 case BUILT_IN_ISLESS:
10718 return fold_builtin_unordered_cmp (loc, fndecl,
10719 arg0, arg1, UNGE_EXPR, GE_EXPR);
10720 case BUILT_IN_ISLESSEQUAL:
10721 return fold_builtin_unordered_cmp (loc, fndecl,
10722 arg0, arg1, UNGT_EXPR, GT_EXPR);
10723 case BUILT_IN_ISLESSGREATER:
10724 return fold_builtin_unordered_cmp (loc, fndecl,
10725 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10726 case BUILT_IN_ISUNORDERED:
10727 return fold_builtin_unordered_cmp (loc, fndecl,
10728 arg0, arg1, UNORDERED_EXPR,
10729 NOP_EXPR);
10730
10731 /* We do the folding for va_start in the expander. */
10732 case BUILT_IN_VA_START:
10733 break;
10734
10735 case BUILT_IN_SPRINTF:
10736 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10737
10738 case BUILT_IN_OBJECT_SIZE:
10739 return fold_builtin_object_size (arg0, arg1);
10740
10741 case BUILT_IN_PRINTF:
10742 case BUILT_IN_PRINTF_UNLOCKED:
10743 case BUILT_IN_VPRINTF:
10744 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10745
10746 case BUILT_IN_PRINTF_CHK:
10747 case BUILT_IN_VPRINTF_CHK:
10748 if (!validate_arg (arg0, INTEGER_TYPE)
10749 || TREE_SIDE_EFFECTS (arg0))
10750 return NULL_TREE;
10751 else
10752 return fold_builtin_printf (loc, fndecl,
10753 arg1, NULL_TREE, ignore, fcode);
10754 break;
10755
10756 case BUILT_IN_FPRINTF:
10757 case BUILT_IN_FPRINTF_UNLOCKED:
10758 case BUILT_IN_VFPRINTF:
10759 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10760 ignore, fcode);
10761
10762 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10763 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10764
10765 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10766 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10767
10768 default:
10769 break;
10770 }
10771 return NULL_TREE;
10772 }
10773
10774 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10775 and ARG2. IGNORE is true if the result of the function call is ignored.
10776 This function returns NULL_TREE if no simplification was possible. */
10777
10778 static tree
10779 fold_builtin_3 (location_t loc, tree fndecl,
10780 tree arg0, tree arg1, tree arg2, bool ignore)
10781 {
10782 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10783 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10784 switch (fcode)
10785 {
10786
10787 CASE_FLT_FN (BUILT_IN_SINCOS):
10788 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10789
10790 CASE_FLT_FN (BUILT_IN_FMA):
10791 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10792 break;
10793
10794 CASE_FLT_FN (BUILT_IN_REMQUO):
10795 if (validate_arg (arg0, REAL_TYPE)
10796 && validate_arg(arg1, REAL_TYPE)
10797 && validate_arg(arg2, POINTER_TYPE))
10798 return do_mpfr_remquo (arg0, arg1, arg2);
10799 break;
10800
10801 case BUILT_IN_MEMSET:
10802 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10803
10804 case BUILT_IN_BCOPY:
10805 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10806 void_type_node, true, /*endp=*/3);
10807
10808 case BUILT_IN_MEMCPY:
10809 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10810 type, ignore, /*endp=*/0);
10811
10812 case BUILT_IN_MEMPCPY:
10813 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10814 type, ignore, /*endp=*/1);
10815
10816 case BUILT_IN_MEMMOVE:
10817 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10818 type, ignore, /*endp=*/3);
10819
10820 case BUILT_IN_STRNCAT:
10821 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10822
10823 case BUILT_IN_STRNCPY:
10824 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10825
10826 case BUILT_IN_STRNCMP:
10827 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10828
10829 case BUILT_IN_MEMCHR:
10830 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10831
10832 case BUILT_IN_BCMP:
10833 case BUILT_IN_MEMCMP:
10834 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10835
10836 case BUILT_IN_SPRINTF:
10837 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10838
10839 case BUILT_IN_SNPRINTF:
10840 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10841
10842 case BUILT_IN_STRCPY_CHK:
10843 case BUILT_IN_STPCPY_CHK:
10844 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10845 ignore, fcode);
10846
10847 case BUILT_IN_STRCAT_CHK:
10848 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10849
10850 case BUILT_IN_PRINTF_CHK:
10851 case BUILT_IN_VPRINTF_CHK:
10852 if (!validate_arg (arg0, INTEGER_TYPE)
10853 || TREE_SIDE_EFFECTS (arg0))
10854 return NULL_TREE;
10855 else
10856 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10857 break;
10858
10859 case BUILT_IN_FPRINTF:
10860 case BUILT_IN_FPRINTF_UNLOCKED:
10861 case BUILT_IN_VFPRINTF:
10862 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10863 ignore, fcode);
10864
10865 case BUILT_IN_FPRINTF_CHK:
10866 case BUILT_IN_VFPRINTF_CHK:
10867 if (!validate_arg (arg1, INTEGER_TYPE)
10868 || TREE_SIDE_EFFECTS (arg1))
10869 return NULL_TREE;
10870 else
10871 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10872 ignore, fcode);
10873
10874 default:
10875 break;
10876 }
10877 return NULL_TREE;
10878 }
10879
10880 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10881 ARG2, and ARG3. IGNORE is true if the result of the function call is
10882 ignored. This function returns NULL_TREE if no simplification was
10883 possible. */
10884
10885 static tree
10886 fold_builtin_4 (location_t loc, tree fndecl,
10887 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10888 {
10889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10890
10891 switch (fcode)
10892 {
10893 case BUILT_IN_MEMCPY_CHK:
10894 case BUILT_IN_MEMPCPY_CHK:
10895 case BUILT_IN_MEMMOVE_CHK:
10896 case BUILT_IN_MEMSET_CHK:
10897 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10898 NULL_TREE, ignore,
10899 DECL_FUNCTION_CODE (fndecl));
10900
10901 case BUILT_IN_STRNCPY_CHK:
10902 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10903
10904 case BUILT_IN_STRNCAT_CHK:
10905 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10906
10907 case BUILT_IN_SNPRINTF:
10908 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10909
10910 case BUILT_IN_FPRINTF_CHK:
10911 case BUILT_IN_VFPRINTF_CHK:
10912 if (!validate_arg (arg1, INTEGER_TYPE)
10913 || TREE_SIDE_EFFECTS (arg1))
10914 return NULL_TREE;
10915 else
10916 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10917 ignore, fcode);
10918 break;
10919
10920 default:
10921 break;
10922 }
10923 return NULL_TREE;
10924 }
10925
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on argument count; each fold_builtin_N helper handles the
     builtins taking exactly N fixed arguments.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
 			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the folded result in a NOP_EXPR carrying LOC and suppress
	 warnings on it, so removing the original call node does not
	 produce spurious "statement without effect" diagnostics.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10971
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  Returns the simplified tree,
   or NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same wrapping as fold_builtin_n: keep the location and suppress
	 "statement without effect" warnings on the replacement tree.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
11014
11015 /* Return true if FNDECL shouldn't be folded right now.
11016 If a built-in function has an inline attribute always_inline
11017 wrapper, defer folding it after always_inline functions have
11018 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11019 might not be performed. */
11020
11021 bool
11022 avoid_folding_inline_builtin (tree fndecl)
11023 {
11024 return (DECL_DECLARED_INLINE_P (fndecl)
11025 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11026 && cfun
11027 && !cfun->always_inline_functions_inlined
11028 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11029 }
11030
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  LOC is the
   location of the call, EXP is the CALL_EXPR, and IGNORE is true if
   the call's result is unused.  Returns the folded replacement tree,
   or NULL_TREE if the call must be left as-is.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* always_inline wrappers (e.g. _FORTIFY_SOURCE) must be inlined
	 before their builtin callee is folded away.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-specific builtins are folded by the target hook.  */
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then the varargs
	     folders as a fallback.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11084
11085 /* Conveniently construct a function call expression. FNDECL names the
11086 function to be called and N arguments are passed in the array
11087 ARGARRAY. */
11088
11089 tree
11090 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11091 {
11092 tree fntype = TREE_TYPE (fndecl);
11093 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11094
11095 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11096 }
11097
11098 /* Conveniently construct a function call expression. FNDECL names the
11099 function to be called and the arguments are passed in the vector
11100 VEC. */
11101
11102 tree
11103 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11104 {
11105 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11106 VEC_address (tree, vec));
11107 }
11108
11109
11110 /* Conveniently construct a function call expression. FNDECL names the
11111 function to be called, N is the number of arguments, and the "..."
11112 parameters are the argument expressions. */
11113
11114 tree
11115 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11116 {
11117 va_list ap;
11118 tree *argarray = XALLOCAVEC (tree, n);
11119 int i;
11120
11121 va_start (ap, n);
11122 for (i = 0; i < n; i++)
11123 argarray[i] = va_arg (ap, tree);
11124 va_end (ap);
11125 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11126 }
11127
11128 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11129 varargs macros aren't supported by all bootstrap compilers. */
11130
11131 tree
11132 build_call_expr (tree fndecl, int n, ...)
11133 {
11134 va_list ap;
11135 tree *argarray = XALLOCAVEC (tree, n);
11136 int i;
11137
11138 va_start (ap, n);
11139 for (i = 0; i < n; i++)
11140 argarray[i] = va_arg (ap, tree);
11141 va_end (ap);
11142 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11143 }
11144
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  LOC is the location to
   use for the resulting expression.  Attempts builtin folding first and
   only conses up a CALL_EXPR when folding fails or is deferred.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Defer folding of builtins wrapped in always_inline functions
	     until after inlining (e.g. for -D_FORTIFY_SOURCE checks).  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-specific builtins go through the target hook;
		 no generic fallback folding applies to them.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
11202
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* New arguments come first, followed by the surviving tail of
	 the old argument list.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* No new arguments: alias the tail of ARGS directly instead of
       copying it.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
11230
11231 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11232 list ARGS along with N new arguments specified as the "..."
11233 parameters. SKIP is the number of arguments in ARGS to be omitted.
11234 OLDNARGS is the number of elements in ARGS. */
11235
11236 static tree
11237 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11238 int skip, tree fndecl, int n, ...)
11239 {
11240 va_list ap;
11241 tree t;
11242
11243 va_start (ap, n);
11244 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11245 va_end (ap);
11246
11247 return t;
11248 }
11249
11250 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11251 along with N new arguments specified as the "..." parameters. SKIP
11252 is the number of arguments in EXP to be omitted. This function is used
11253 to do varargs-to-varargs transformations. */
11254
11255 static tree
11256 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11257 {
11258 va_list ap;
11259 tree t;
11260
11261 va_start (ap, n);
11262 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11263 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11264 va_end (ap);
11265
11266 return t;
11267 }
11268
11269 /* Validate a single argument ARG against a tree code CODE representing
11270 a type. */
11271
11272 static bool
11273 validate_arg (const_tree arg, enum tree_code code)
11274 {
11275 if (!arg)
11276 return false;
11277 else if (code == POINTER_TYPE)
11278 return POINTER_TYPE_P (TREE_TYPE (arg));
11279 else if (code == INTEGER_TYPE)
11280 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11281 return code == TREE_CODE (TREE_TYPE (arg));
11282 }
11283
11284 /* This function validates the types of a function call argument list
11285 against a specified list of tree_codes. If the last specifier is a 0,
11286 that represents an ellipses, otherwise the last specifier must be a
11287 VOID_TYPE.
11288
11289 This is the GIMPLE version of validate_arglist. Eventually we want to
11290 completely convert builtins.c to work from GIMPLEs and the tree based
11291 validate_arglist will then be removed. */
11292
11293 bool
11294 validate_gimple_arglist (const_gimple call, ...)
11295 {
11296 enum tree_code code;
11297 bool res = 0;
11298 va_list ap;
11299 const_tree arg;
11300 size_t i;
11301
11302 va_start (ap, call);
11303 i = 0;
11304
11305 do
11306 {
11307 code = (enum tree_code) va_arg (ap, int);
11308 switch (code)
11309 {
11310 case 0:
11311 /* This signifies an ellipses, any further arguments are all ok. */
11312 res = true;
11313 goto end;
11314 case VOID_TYPE:
11315 /* This signifies an endlink, if no arguments remain, return
11316 true, otherwise return false. */
11317 res = (i == gimple_call_num_args (call));
11318 goto end;
11319 default:
11320 /* If no parameters remain or the parameter's code does not
11321 match the specified code, return false. Otherwise continue
11322 checking any remaining arguments. */
11323 arg = gimple_call_arg (call, i++);
11324 if (!validate_arg (arg, code))
11325 goto end;
11326 break;
11327 }
11328 }
11329 while (1);
11330
11331 /* We need gotos here since we can only have one VA_CLOSE in a
11332 function. */
11333 end: ;
11334 va_end (ap);
11335
11336 return res;
11337 }
11338
11339 /* This function validates the types of a function call argument list
11340 against a specified list of tree_codes. If the last specifier is a 0,
11341 that represents an ellipses, otherwise the last specifier must be a
11342 VOID_TYPE. */
11343
11344 bool
11345 validate_arglist (const_tree callexpr, ...)
11346 {
11347 enum tree_code code;
11348 bool res = 0;
11349 va_list ap;
11350 const_call_expr_arg_iterator iter;
11351 const_tree arg;
11352
11353 va_start (ap, callexpr);
11354 init_const_call_expr_arg_iterator (callexpr, &iter);
11355
11356 do
11357 {
11358 code = (enum tree_code) va_arg (ap, int);
11359 switch (code)
11360 {
11361 case 0:
11362 /* This signifies an ellipses, any further arguments are all ok. */
11363 res = true;
11364 goto end;
11365 case VOID_TYPE:
11366 /* This signifies an endlink, if no arguments remain, return
11367 true, otherwise return false. */
11368 res = !more_const_call_expr_args_p (&iter);
11369 goto end;
11370 default:
11371 /* If no parameters remain or the parameter's code does not
11372 match the specified code, return false. Otherwise continue
11373 checking any remaining arguments. */
11374 arg = next_const_call_expr_arg (&iter);
11375 if (!validate_arg (arg, code))
11376 goto end;
11377 break;
11378 }
11379 }
11380 while (1);
11381
11382 /* We need gotos here since we can only have one VA_CLOSE in a
11383 function. */
11384 end: ;
11385 va_end (ap);
11386
11387 return res;
11388 }
11389
/* Default target-specific builtin expander that does nothing.  Targets
   that expand no machine builtins in RTL use this as the hook; the
   NULL_RTX return tells the caller to fall back to a library call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
11401
11402 /* Returns true is EXP represents data that would potentially reside
11403 in a readonly section. */
11404
11405 static bool
11406 readonly_data_expr (tree exp)
11407 {
11408 STRIP_NOPS (exp);
11409
11410 if (TREE_CODE (exp) != ADDR_EXPR)
11411 return false;
11412
11413 exp = get_base_address (TREE_OPERAND (exp, 0));
11414 if (!exp)
11415 return false;
11416
11417 /* Make sure we call decl_readonly_section only for trees it
11418 can handle (since it returns true for everything it doesn't
11419 understand). */
11420 if (TREE_CODE (exp) == STRING_CST
11421 || TREE_CODE (exp) == CONSTRUCTOR
11422 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11423 return decl_readonly_section (exp, 0);
11424 else
11425 return false;
11426 }
11427
11428 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11429 to the call, and TYPE is its return type.
11430
11431 Return NULL_TREE if no simplification was possible, otherwise return the
11432 simplified form of the call as a tree.
11433
11434 The simplified form may be a constant or other expression which
11435 computes the same value, but in a more efficient manner (including
11436 calls to other builtin functions).
11437
11438 The call may contain arguments which need to be evaluated, but
11439 which are not useful to determine the result of the call. In
11440 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11441 COMPOUND_EXPR will be an argument which must be evaluated.
11442 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11443 COMPOUND_EXPR in the chain will contain the tree for the simplified
11444 form of the builtin function call. */
11445
11446 static tree
11447 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11448 {
11449 if (!validate_arg (s1, POINTER_TYPE)
11450 || !validate_arg (s2, POINTER_TYPE))
11451 return NULL_TREE;
11452 else
11453 {
11454 tree fn;
11455 const char *p1, *p2;
11456
11457 p2 = c_getstr (s2);
11458 if (p2 == NULL)
11459 return NULL_TREE;
11460
11461 p1 = c_getstr (s1);
11462 if (p1 != NULL)
11463 {
11464 const char *r = strstr (p1, p2);
11465 tree tem;
11466
11467 if (r == NULL)
11468 return build_int_cst (TREE_TYPE (s1), 0);
11469
11470 /* Return an offset into the constant string argument. */
11471 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11472 return fold_convert_loc (loc, type, tem);
11473 }
11474
11475 /* The argument is const char *, and the result is char *, so we need
11476 a type conversion here to avoid a warning. */
11477 if (p2[0] == '\0')
11478 return fold_convert_loc (loc, type, s1);
11479
11480 if (p2[1] != '\0')
11481 return NULL_TREE;
11482
11483 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11484 if (!fn)
11485 return NULL_TREE;
11486
11487 /* New argument list transforming strstr(s1, s2) to
11488 strchr(s1, s2[0]). */
11489 return build_call_expr_loc (loc, fn, 2, s1,
11490 build_int_cst (integer_type_node, p2[0]));
11491 }
11492 }
11493
11494 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11495 the call, and TYPE is its return type.
11496
11497 Return NULL_TREE if no simplification was possible, otherwise return the
11498 simplified form of the call as a tree.
11499
11500 The simplified form may be a constant or other expression which
11501 computes the same value, but in a more efficient manner (including
11502 calls to other builtin functions).
11503
11504 The call may contain arguments which need to be evaluated, but
11505 which are not useful to determine the result of the call. In
11506 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11507 COMPOUND_EXPR will be an argument which must be evaluated.
11508 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11509 COMPOUND_EXPR in the chain will contain the tree for the simplified
11510 form of the builtin function call. */
11511
11512 static tree
11513 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11514 {
11515 if (!validate_arg (s1, POINTER_TYPE)
11516 || !validate_arg (s2, INTEGER_TYPE))
11517 return NULL_TREE;
11518 else
11519 {
11520 const char *p1;
11521
11522 if (TREE_CODE (s2) != INTEGER_CST)
11523 return NULL_TREE;
11524
11525 p1 = c_getstr (s1);
11526 if (p1 != NULL)
11527 {
11528 char c;
11529 const char *r;
11530 tree tem;
11531
11532 if (target_char_cast (s2, &c))
11533 return NULL_TREE;
11534
11535 r = strchr (p1, c);
11536
11537 if (r == NULL)
11538 return build_int_cst (TREE_TYPE (s1), 0);
11539
11540 /* Return an offset into the constant string argument. */
11541 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11542 return fold_convert_loc (loc, type, tem);
11543 }
11544 return NULL_TREE;
11545 }
11546 }
11547
11548 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11549 the call, and TYPE is its return type.
11550
11551 Return NULL_TREE if no simplification was possible, otherwise return the
11552 simplified form of the call as a tree.
11553
11554 The simplified form may be a constant or other expression which
11555 computes the same value, but in a more efficient manner (including
11556 calls to other builtin functions).
11557
11558 The call may contain arguments which need to be evaluated, but
11559 which are not useful to determine the result of the call. In
11560 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11561 COMPOUND_EXPR will be an argument which must be evaluated.
11562 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11563 COMPOUND_EXPR in the chain will contain the tree for the simplified
11564 form of the builtin function call. */
11565
11566 static tree
11567 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11568 {
11569 if (!validate_arg (s1, POINTER_TYPE)
11570 || !validate_arg (s2, INTEGER_TYPE))
11571 return NULL_TREE;
11572 else
11573 {
11574 tree fn;
11575 const char *p1;
11576
11577 if (TREE_CODE (s2) != INTEGER_CST)
11578 return NULL_TREE;
11579
11580 p1 = c_getstr (s1);
11581 if (p1 != NULL)
11582 {
11583 char c;
11584 const char *r;
11585 tree tem;
11586
11587 if (target_char_cast (s2, &c))
11588 return NULL_TREE;
11589
11590 r = strrchr (p1, c);
11591
11592 if (r == NULL)
11593 return build_int_cst (TREE_TYPE (s1), 0);
11594
11595 /* Return an offset into the constant string argument. */
11596 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11597 return fold_convert_loc (loc, type, tem);
11598 }
11599
11600 if (! integer_zerop (s2))
11601 return NULL_TREE;
11602
11603 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11604 if (!fn)
11605 return NULL_TREE;
11606
11607 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11608 return build_call_expr_loc (loc, fn, 2, s1, s2);
11609 }
11610 }
11611
11612 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11613 to the call, and TYPE is its return type.
11614
11615 Return NULL_TREE if no simplification was possible, otherwise return the
11616 simplified form of the call as a tree.
11617
11618 The simplified form may be a constant or other expression which
11619 computes the same value, but in a more efficient manner (including
11620 calls to other builtin functions).
11621
11622 The call may contain arguments which need to be evaluated, but
11623 which are not useful to determine the result of the call. In
11624 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11625 COMPOUND_EXPR will be an argument which must be evaluated.
11626 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11627 COMPOUND_EXPR in the chain will contain the tree for the simplified
11628 form of the builtin function call. */
11629
11630 static tree
11631 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11632 {
11633 if (!validate_arg (s1, POINTER_TYPE)
11634 || !validate_arg (s2, POINTER_TYPE))
11635 return NULL_TREE;
11636 else
11637 {
11638 tree fn;
11639 const char *p1, *p2;
11640
11641 p2 = c_getstr (s2);
11642 if (p2 == NULL)
11643 return NULL_TREE;
11644
11645 p1 = c_getstr (s1);
11646 if (p1 != NULL)
11647 {
11648 const char *r = strpbrk (p1, p2);
11649 tree tem;
11650
11651 if (r == NULL)
11652 return build_int_cst (TREE_TYPE (s1), 0);
11653
11654 /* Return an offset into the constant string argument. */
11655 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11656 return fold_convert_loc (loc, type, tem);
11657 }
11658
11659 if (p2[0] == '\0')
11660 /* strpbrk(x, "") == NULL.
11661 Evaluate and ignore s1 in case it had side-effects. */
11662 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11663
11664 if (p2[1] != '\0')
11665 return NULL_TREE; /* Really call strpbrk. */
11666
11667 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11668 if (!fn)
11669 return NULL_TREE;
11670
11671 /* New argument list transforming strpbrk(s1, s2) to
11672 strchr(s1, s2[0]). */
11673 return build_call_expr_loc (loc, fn, 2, s1,
11674 build_int_cst (integer_type_node, p2[0]));
11675 }
11676 }
11677
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
	  tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  DST is evaluated twice below
	     (once inside strlen, once as the COMPOUND_EXPR result), so
	     it must be wrapped in a SAVE_EXPR first.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Result: (strcpy (dst + strlen (dst), src), dst) -- the
	     COMPOUND_EXPR performs the copy and yields DST.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11748
11749 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11750 arguments to the call.
11751
11752 Return NULL_TREE if no simplification was possible, otherwise return the
11753 simplified form of the call as a tree.
11754
11755 The simplified form may be a constant or other expression which
11756 computes the same value, but in a more efficient manner (including
11757 calls to other builtin functions).
11758
11759 The call may contain arguments which need to be evaluated, but
11760 which are not useful to determine the result of the call. In
11761 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11762 COMPOUND_EXPR will be an argument which must be evaluated.
11763 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11764 COMPOUND_EXPR in the chain will contain the tree for the simplified
11765 form of the builtin function call. */
11766
11767 static tree
11768 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11769 {
11770 if (!validate_arg (dst, POINTER_TYPE)
11771 || !validate_arg (src, POINTER_TYPE)
11772 || !validate_arg (len, INTEGER_TYPE))
11773 return NULL_TREE;
11774 else
11775 {
11776 const char *p = c_getstr (src);
11777
11778 /* If the requested length is zero, or the src parameter string
11779 length is zero, return the dst parameter. */
11780 if (integer_zerop (len) || (p && *p == '\0'))
11781 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11782
11783 /* If the requested len is greater than or equal to the string
11784 length, call strcat. */
11785 if (TREE_CODE (len) == INTEGER_CST && p
11786 && compare_tree_int (len, strlen (p)) >= 0)
11787 {
11788 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11789
11790 /* If the replacement _DECL isn't initialized, don't do the
11791 transformation. */
11792 if (!fn)
11793 return NULL_TREE;
11794
11795 return build_call_expr_loc (loc, fn, 2, dst, src);
11796 }
11797 return NULL_TREE;
11798 }
11799 }
11800
11801 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11802 to the call.
11803
11804 Return NULL_TREE if no simplification was possible, otherwise return the
11805 simplified form of the call as a tree.
11806
11807 The simplified form may be a constant or other expression which
11808 computes the same value, but in a more efficient manner (including
11809 calls to other builtin functions).
11810
11811 The call may contain arguments which need to be evaluated, but
11812 which are not useful to determine the result of the call. In
11813 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11814 COMPOUND_EXPR will be an argument which must be evaluated.
11815 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11816 COMPOUND_EXPR in the chain will contain the tree for the simplified
11817 form of the builtin function call. */
11818
11819 static tree
11820 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11821 {
11822 if (!validate_arg (s1, POINTER_TYPE)
11823 || !validate_arg (s2, POINTER_TYPE))
11824 return NULL_TREE;
11825 else
11826 {
11827 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11828
11829 /* If both arguments are constants, evaluate at compile-time. */
11830 if (p1 && p2)
11831 {
11832 const size_t r = strspn (p1, p2);
11833 return size_int (r);
11834 }
11835
11836 /* If either argument is "", return NULL_TREE. */
11837 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11838 /* Evaluate and ignore both arguments in case either one has
11839 side-effects. */
11840 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11841 s1, s2);
11842 return NULL_TREE;
11843 }
11844 }
11845
11846 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11847 to the call.
11848
11849 Return NULL_TREE if no simplification was possible, otherwise return the
11850 simplified form of the call as a tree.
11851
11852 The simplified form may be a constant or other expression which
11853 computes the same value, but in a more efficient manner (including
11854 calls to other builtin functions).
11855
11856 The call may contain arguments which need to be evaluated, but
11857 which are not useful to determine the result of the call. In
11858 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11859 COMPOUND_EXPR will be an argument which must be evaluated.
11860 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11861 COMPOUND_EXPR in the chain will contain the tree for the simplified
11862 form of the builtin function call. */
11863
11864 static tree
11865 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11866 {
11867 if (!validate_arg (s1, POINTER_TYPE)
11868 || !validate_arg (s2, POINTER_TYPE))
11869 return NULL_TREE;
11870 else
11871 {
11872 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11873
11874 /* If both arguments are constants, evaluate at compile-time. */
11875 if (p1 && p2)
11876 {
11877 const size_t r = strcspn (p1, p2);
11878 return size_int (r);
11879 }
11880
11881 /* If the first argument is "", return NULL_TREE. */
11882 if (p1 && *p1 == '\0')
11883 {
11884 /* Evaluate and ignore argument s2 in case it has
11885 side-effects. */
11886 return omit_one_operand_loc (loc, size_type_node,
11887 size_zero_node, s2);
11888 }
11889
11890 /* If the second argument is "", return __builtin_strlen(s1). */
11891 if (p2 && *p2 == '\0')
11892 {
11893 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11894
11895 /* If the replacement _DECL isn't initialized, don't do the
11896 transformation. */
11897 if (!fn)
11898 return NULL_TREE;
11899
11900 return build_call_expr_loc (loc, fn, 1, s1);
11901 }
11902 return NULL_TREE;
11903 }
11904 }
11905
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11912
11913 tree
11914 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11915 bool ignore, bool unlocked, tree len)
11916 {
11917 /* If we're using an unlocked function, assume the other unlocked
11918 functions exist explicitly. */
11919 tree const fn_fputc = (unlocked
11920 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11921 : builtin_decl_implicit (BUILT_IN_FPUTC));
11922 tree const fn_fwrite = (unlocked
11923 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11924 : builtin_decl_implicit (BUILT_IN_FWRITE));
11925
11926 /* If the return value is used, don't do the transformation. */
11927 if (!ignore)
11928 return NULL_TREE;
11929
11930 /* Verify the arguments in the original call. */
11931 if (!validate_arg (arg0, POINTER_TYPE)
11932 || !validate_arg (arg1, POINTER_TYPE))
11933 return NULL_TREE;
11934
11935 if (! len)
11936 len = c_strlen (arg0, 0);
11937
11938 /* Get the length of the string passed to fputs. If the length
11939 can't be determined, punt. */
11940 if (!len
11941 || TREE_CODE (len) != INTEGER_CST)
11942 return NULL_TREE;
11943
11944 switch (compare_tree_int (len, 1))
11945 {
11946 case -1: /* length is 0, delete the call entirely . */
11947 return omit_one_operand_loc (loc, integer_type_node,
11948 integer_zero_node, arg1);;
11949
11950 case 0: /* length is 1, call fputc. */
11951 {
11952 const char *p = c_getstr (arg0);
11953
11954 if (p != NULL)
11955 {
11956 if (fn_fputc)
11957 return build_call_expr_loc (loc, fn_fputc, 2,
11958 build_int_cst
11959 (integer_type_node, p[0]), arg1);
11960 else
11961 return NULL_TREE;
11962 }
11963 }
11964 /* FALLTHROUGH */
11965 case 1: /* length is greater than 1, call fwrite. */
11966 {
11967 /* If optimizing for size keep fputs. */
11968 if (optimize_function_for_size_p (cfun))
11969 return NULL_TREE;
11970 /* New argument list transforming fputs(string, stream) to
11971 fwrite(string, 1, len, stream). */
11972 if (fn_fwrite)
11973 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11974 size_one_node, len, arg1);
11975 else
11976 return NULL_TREE;
11977 }
11978 default:
11979 gcc_unreachable ();
11980 }
11981 return NULL_TREE;
11982 }
11983
11984 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11985 produced. False otherwise. This is done so that we don't output the error
11986 or warning twice or three times. */
11987
11988 bool
11989 fold_builtin_next_arg (tree exp, bool va_start_p)
11990 {
11991 tree fntype = TREE_TYPE (current_function_decl);
11992 int nargs = call_expr_nargs (exp);
11993 tree arg;
11994
11995 if (!stdarg_p (fntype))
11996 {
11997 error ("%<va_start%> used in function with fixed args");
11998 return true;
11999 }
12000
12001 if (va_start_p)
12002 {
12003 if (va_start_p && (nargs != 2))
12004 {
12005 error ("wrong number of arguments to function %<va_start%>");
12006 return true;
12007 }
12008 arg = CALL_EXPR_ARG (exp, 1);
12009 }
12010 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12011 when we checked the arguments and if needed issued a warning. */
12012 else
12013 {
12014 if (nargs == 0)
12015 {
12016 /* Evidently an out of date version of <stdarg.h>; can't validate
12017 va_start's second argument, but can still work as intended. */
12018 warning (0, "%<__builtin_next_arg%> called without an argument");
12019 return true;
12020 }
12021 else if (nargs > 1)
12022 {
12023 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12024 return true;
12025 }
12026 arg = CALL_EXPR_ARG (exp, 0);
12027 }
12028
12029 if (TREE_CODE (arg) == SSA_NAME)
12030 arg = SSA_NAME_VAR (arg);
12031
12032 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12033 or __builtin_next_arg (0) the first time we see it, after checking
12034 the arguments and if needed issuing a warning. */
12035 if (!integer_zerop (arg))
12036 {
12037 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12038
12039 /* Strip off all nops for the sake of the comparison. This
12040 is not quite the same as STRIP_NOPS. It does more.
12041 We must also strip off INDIRECT_EXPR for C++ reference
12042 parameters. */
12043 while (CONVERT_EXPR_P (arg)
12044 || TREE_CODE (arg) == INDIRECT_REF)
12045 arg = TREE_OPERAND (arg, 0);
12046 if (arg != last_parm)
12047 {
12048 /* FIXME: Sometimes with the tree optimizers we can get the
12049 not the last argument even though the user used the last
12050 argument. We just warn and set the arg to be the last
12051 argument so that we will get wrong-code because of
12052 it. */
12053 warning (0, "second parameter of %<va_start%> not last named argument");
12054 }
12055
12056 /* Undefined by C99 7.15.1.4p4 (va_start):
12057 "If the parameter parmN is declared with the register storage
12058 class, with a function or array type, or with a type that is
12059 not compatible with the type that results after application of
12060 the default argument promotions, the behavior is undefined."
12061 */
12062 else if (DECL_REGISTER (arg))
12063 warning (0, "undefined behaviour when second parameter of "
12064 "%<va_start%> is declared with %<register%> storage");
12065
12066 /* We want to verify the second parameter just once before the tree
12067 optimizers are run and then avoid keeping it in the tree,
12068 as otherwise we could warn even for correct code like:
12069 void foo (int i, ...)
12070 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12071 if (va_start_p)
12072 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12073 else
12074 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12075 }
12076 return false;
12077 }
12078
12079
12080 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12081 ORIG may be null if this is a 2-argument call. We don't attempt to
12082 simplify calls with more than 3 arguments.
12083
12084 Return NULL_TREE if no simplification was possible, otherwise return the
12085 simplified form of the call as a tree. If IGNORED is true, it means that
12086 the caller does not use the returned value of the function. */
12087
12088 static tree
12089 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12090 tree orig, int ignored)
12091 {
12092 tree call, retval;
12093 const char *fmt_str = NULL;
12094
12095 /* Verify the required arguments in the original call. We deal with two
12096 types of sprintf() calls: 'sprintf (str, fmt)' and
12097 'sprintf (dest, "%s", orig)'. */
12098 if (!validate_arg (dest, POINTER_TYPE)
12099 || !validate_arg (fmt, POINTER_TYPE))
12100 return NULL_TREE;
12101 if (orig && !validate_arg (orig, POINTER_TYPE))
12102 return NULL_TREE;
12103
12104 /* Check whether the format is a literal string constant. */
12105 fmt_str = c_getstr (fmt);
12106 if (fmt_str == NULL)
12107 return NULL_TREE;
12108
12109 call = NULL_TREE;
12110 retval = NULL_TREE;
12111
12112 if (!init_target_chars ())
12113 return NULL_TREE;
12114
12115 /* If the format doesn't contain % args or %%, use strcpy. */
12116 if (strchr (fmt_str, target_percent) == NULL)
12117 {
12118 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12119
12120 if (!fn)
12121 return NULL_TREE;
12122
12123 /* Don't optimize sprintf (buf, "abc", ptr++). */
12124 if (orig)
12125 return NULL_TREE;
12126
12127 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12128 'format' is known to contain no % formats. */
12129 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12130 if (!ignored)
12131 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12132 }
12133
12134 /* If the format is "%s", use strcpy if the result isn't used. */
12135 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12136 {
12137 tree fn;
12138 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12139
12140 if (!fn)
12141 return NULL_TREE;
12142
12143 /* Don't crash on sprintf (str1, "%s"). */
12144 if (!orig)
12145 return NULL_TREE;
12146
12147 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12148 if (!ignored)
12149 {
12150 retval = c_strlen (orig, 1);
12151 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12152 return NULL_TREE;
12153 }
12154 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12155 }
12156
12157 if (call && retval)
12158 {
12159 retval = fold_convert_loc
12160 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12161 retval);
12162 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12163 }
12164 else
12165 return call;
12166 }
12167
12168 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12169 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12170 attempt to simplify calls with more than 4 arguments.
12171
12172 Return NULL_TREE if no simplification was possible, otherwise return the
12173 simplified form of the call as a tree. If IGNORED is true, it means that
12174 the caller does not use the returned value of the function. */
12175
12176 static tree
12177 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12178 tree orig, int ignored)
12179 {
12180 tree call, retval;
12181 const char *fmt_str = NULL;
12182 unsigned HOST_WIDE_INT destlen;
12183
12184 /* Verify the required arguments in the original call. We deal with two
12185 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12186 'snprintf (dest, cst, "%s", orig)'. */
12187 if (!validate_arg (dest, POINTER_TYPE)
12188 || !validate_arg (destsize, INTEGER_TYPE)
12189 || !validate_arg (fmt, POINTER_TYPE))
12190 return NULL_TREE;
12191 if (orig && !validate_arg (orig, POINTER_TYPE))
12192 return NULL_TREE;
12193
12194 if (!host_integerp (destsize, 1))
12195 return NULL_TREE;
12196
12197 /* Check whether the format is a literal string constant. */
12198 fmt_str = c_getstr (fmt);
12199 if (fmt_str == NULL)
12200 return NULL_TREE;
12201
12202 call = NULL_TREE;
12203 retval = NULL_TREE;
12204
12205 if (!init_target_chars ())
12206 return NULL_TREE;
12207
12208 destlen = tree_low_cst (destsize, 1);
12209
12210 /* If the format doesn't contain % args or %%, use strcpy. */
12211 if (strchr (fmt_str, target_percent) == NULL)
12212 {
12213 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12214 size_t len = strlen (fmt_str);
12215
12216 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12217 if (orig)
12218 return NULL_TREE;
12219
12220 /* We could expand this as
12221 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12222 or to
12223 memcpy (str, fmt_with_nul_at_cstm1, cst);
12224 but in the former case that might increase code size
12225 and in the latter case grow .rodata section too much.
12226 So punt for now. */
12227 if (len >= destlen)
12228 return NULL_TREE;
12229
12230 if (!fn)
12231 return NULL_TREE;
12232
12233 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12234 'format' is known to contain no % formats and
12235 strlen (fmt) < cst. */
12236 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12237
12238 if (!ignored)
12239 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12240 }
12241
12242 /* If the format is "%s", use strcpy if the result isn't used. */
12243 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12244 {
12245 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12246 unsigned HOST_WIDE_INT origlen;
12247
12248 /* Don't crash on snprintf (str1, cst, "%s"). */
12249 if (!orig)
12250 return NULL_TREE;
12251
12252 retval = c_strlen (orig, 1);
12253 if (!retval || !host_integerp (retval, 1))
12254 return NULL_TREE;
12255
12256 origlen = tree_low_cst (retval, 1);
12257 /* We could expand this as
12258 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12259 or to
12260 memcpy (str1, str2_with_nul_at_cstm1, cst);
12261 but in the former case that might increase code size
12262 and in the latter case grow .rodata section too much.
12263 So punt for now. */
12264 if (origlen >= destlen)
12265 return NULL_TREE;
12266
12267 /* Convert snprintf (str1, cst, "%s", str2) into
12268 strcpy (str1, str2) if strlen (str2) < cst. */
12269 if (!fn)
12270 return NULL_TREE;
12271
12272 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12273
12274 if (ignored)
12275 retval = NULL_TREE;
12276 }
12277
12278 if (call && retval)
12279 {
12280 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12281 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12282 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12283 }
12284 else
12285 return call;
12286 }
12287
12288 /* Expand a call EXP to __builtin_object_size. */
12289
12290 rtx
12291 expand_builtin_object_size (tree exp)
12292 {
12293 tree ost;
12294 int object_size_type;
12295 tree fndecl = get_callee_fndecl (exp);
12296
12297 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12298 {
12299 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12300 exp, fndecl);
12301 expand_builtin_trap ();
12302 return const0_rtx;
12303 }
12304
12305 ost = CALL_EXPR_ARG (exp, 1);
12306 STRIP_NOPS (ost);
12307
12308 if (TREE_CODE (ost) != INTEGER_CST
12309 || tree_int_cst_sgn (ost) < 0
12310 || compare_tree_int (ost, 3) > 0)
12311 {
12312 error ("%Klast argument of %D is not integer constant between 0 and 3",
12313 exp, fndecl);
12314 expand_builtin_trap ();
12315 return const0_rtx;
12316 }
12317
12318 object_size_type = tree_low_cst (ost, 0);
12319
12320 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12321 }
12322
12323 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12324 FCODE is the BUILT_IN_* to use.
12325 Return NULL_RTX if we failed; the caller should emit a normal call,
12326 otherwise try to get the result in TARGET, if convenient (and in
12327 mode MODE if that's convenient). */
12328
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is the fill byte for memset, a pointer for the
     copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* The object size must be a host-representable constant for the
     check below to mean anything.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than the known object SIZE is a certain
	 overflow; (size_t) -1 SIZE means "size unknown" and is skipped.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand as a call to the unchecked variant, carrying over the
	 tail-call flag from the original call expression.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12442
12443 /* Emit warning if a buffer overflow is detected at compile time. */
12444
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length operand and the object-size operand; their
     argument positions depend on which _chk builtin this is.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant SIZE, or (size_t) -1 ("size unknown"), cannot be
     diagnosed at compile time.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Here LEN is the source string: warn only when its constant
	 length (including the terminating NUL) exceeds SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* The bound exceeds SIZE but the source length is unknown:
	     overflow is possible, not certain, so soften the wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12510
12511 /* Emit warning if a buffer overflow is detected at compile time
12512 in __sprintf_chk/__vsprintf_chk calls. */
12513
12514 static void
12515 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12516 {
12517 tree size, len, fmt;
12518 const char *fmt_str;
12519 int nargs = call_expr_nargs (exp);
12520
12521 /* Verify the required arguments in the original call. */
12522
12523 if (nargs < 4)
12524 return;
12525 size = CALL_EXPR_ARG (exp, 2);
12526 fmt = CALL_EXPR_ARG (exp, 3);
12527
12528 if (! host_integerp (size, 1) || integer_all_onesp (size))
12529 return;
12530
12531 /* Check whether the format is a literal string constant. */
12532 fmt_str = c_getstr (fmt);
12533 if (fmt_str == NULL)
12534 return;
12535
12536 if (!init_target_chars ())
12537 return;
12538
12539 /* If the format doesn't contain % args or %%, we know its size. */
12540 if (strchr (fmt_str, target_percent) == 0)
12541 len = build_int_cstu (size_type_node, strlen (fmt_str));
12542 /* If the format is "%s" and first ... argument is a string literal,
12543 we know it too. */
12544 else if (fcode == BUILT_IN_SPRINTF_CHK
12545 && strcmp (fmt_str, target_percent_s) == 0)
12546 {
12547 tree arg;
12548
12549 if (nargs < 5)
12550 return;
12551 arg = CALL_EXPR_ARG (exp, 4);
12552 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12553 return;
12554
12555 len = c_strlen (arg, 1);
12556 if (!len || ! host_integerp (len, 1))
12557 return;
12558 }
12559 else
12560 return;
12561
12562 if (! tree_int_cst_lt (len, size))
12563 warning_at (tree_nonartificial_location (exp),
12564 0, "%Kcall to %D will always overflow destination buffer",
12565 exp, get_callee_fndecl (exp));
12566 }
12567
12568 /* Emit warning if a free is called with address of a variable. */
12569
12570 static void
12571 maybe_emit_free_warning (tree exp)
12572 {
12573 tree arg = CALL_EXPR_ARG (exp, 0);
12574
12575 STRIP_NOPS (arg);
12576 if (TREE_CODE (arg) != ADDR_EXPR)
12577 return;
12578
12579 arg = get_base_address (TREE_OPERAND (arg, 0));
12580 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12581 return;
12582
12583 if (SSA_VAR_P (arg))
12584 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12585 "%Kattempt to free a non-heap object %qD", exp, arg);
12586 else
12587 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12588 "%Kattempt to free a non-heap object", exp);
12589 }
12590
12591 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12592 if possible. */
12593
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* Fold only when the computed size is representable in size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12640
12641 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12642 DEST, SRC, LEN, and SIZE are the arguments to the call.
12643 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12644 code of the builtin. If MAXLEN is not NULL, it is maximum length
12645 passed as third argument. */
12646
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill byte, not a
     pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* A non-constant object size cannot be checked at compile time.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* (size_t) -1 means "size unknown", in which case the check is a
     no-op; otherwise prove LEN (or MAXLEN) <= SIZE before dropping it.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* The bound could exceed the object size: keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12735
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  Returns the folded replacement
   tree, or NULL_TREE if the call must be emitted as-is.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* All folds below need a compile-time constant object size.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE presumably means "object size unknown" (the
     __chk convention), in which case no length proof is needed.
     Otherwise prove that the copy fits before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      /* c_strlen may return a non-constant expression; it can
		 only be reused below if evaluating it has no side
		 effects.  */
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes so the terminating NUL is included.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* MAXLEN is the string length without the NUL, so the buffer
	 must be strictly larger than MAXLEN for the copy to fit.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12818
12819 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12820 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12821 length passed as third argument. */
12822
12823 tree
12824 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12825 tree len, tree size, tree maxlen)
12826 {
12827 tree fn;
12828
12829 if (!validate_arg (dest, POINTER_TYPE)
12830 || !validate_arg (src, POINTER_TYPE)
12831 || !validate_arg (len, INTEGER_TYPE)
12832 || !validate_arg (size, INTEGER_TYPE))
12833 return NULL_TREE;
12834
12835 if (! host_integerp (size, 1))
12836 return NULL_TREE;
12837
12838 if (! integer_all_onesp (size))
12839 {
12840 if (! host_integerp (len, 1))
12841 {
12842 /* If LEN is not constant, try MAXLEN too.
12843 For MAXLEN only allow optimizing into non-_ocs function
12844 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12845 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12846 return NULL_TREE;
12847 }
12848 else
12849 maxlen = len;
12850
12851 if (tree_int_cst_lt (size, maxlen))
12852 return NULL_TREE;
12853 }
12854
12855 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12856 fn = builtin_decl_explicit (BUILT_IN_STRNCPY);
12857 if (!fn)
12858 return NULL_TREE;
12859
12860 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12861 }
12862
12863 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12864 are the arguments to the call. */
12865
12866 static tree
12867 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12868 tree src, tree size)
12869 {
12870 tree fn;
12871 const char *p;
12872
12873 if (!validate_arg (dest, POINTER_TYPE)
12874 || !validate_arg (src, POINTER_TYPE)
12875 || !validate_arg (size, INTEGER_TYPE))
12876 return NULL_TREE;
12877
12878 p = c_getstr (src);
12879 /* If the SRC parameter is "", return DEST. */
12880 if (p && *p == '\0')
12881 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12882
12883 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12884 return NULL_TREE;
12885
12886 /* If __builtin_strcat_chk is used, assume strcat is available. */
12887 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12888 if (!fn)
12889 return NULL_TREE;
12890
12891 return build_call_expr_loc (loc, fn, 2, dest, src);
12892 }
12893
12894 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12895 LEN, and SIZE. */
12896
12897 static tree
12898 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12899 tree dest, tree src, tree len, tree size)
12900 {
12901 tree fn;
12902 const char *p;
12903
12904 if (!validate_arg (dest, POINTER_TYPE)
12905 || !validate_arg (src, POINTER_TYPE)
12906 || !validate_arg (size, INTEGER_TYPE)
12907 || !validate_arg (size, INTEGER_TYPE))
12908 return NULL_TREE;
12909
12910 p = c_getstr (src);
12911 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12912 if (p && *p == '\0')
12913 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12914 else if (integer_zerop (len))
12915 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12916
12917 if (! host_integerp (size, 1))
12918 return NULL_TREE;
12919
12920 if (! integer_all_onesp (size))
12921 {
12922 tree src_len = c_strlen (src, 1);
12923 if (src_len
12924 && host_integerp (src_len, 1)
12925 && host_integerp (len, 1)
12926 && ! tree_int_cst_lt (len, src_len))
12927 {
12928 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12929 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12930 if (!fn)
12931 return NULL_TREE;
12932
12933 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12934 }
12935 return NULL_TREE;
12936 }
12937
12938 /* If __builtin_strncat_chk is used, assume strncat is available. */
12939 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12940 if (!fn)
12941 return NULL_TREE;
12942
12943 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12944 }
12945
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.
     Layout is (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, if determinable below, is the constant number of bytes the
     call will produce (excluding the NUL).  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk there must be no variadic args in that
	     case (nargs == 4); vsprintf_chk is always acceptable.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  /* Only a constant length is usable here.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is the all-ones "unknown" marker, only fold when the
     known output length LEN fits strictly below SIZE (leaving room
     for the terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call dropping FLAG and SIZE: keep args[0] (dest) and
     args[3] (fmt) plus any trailing arguments past index 4.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
13037
13038 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13039 a normal call should be emitted rather than expanding the function
13040 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13041
13042 static tree
13043 fold_builtin_sprintf_chk (location_t loc, tree exp,
13044 enum built_in_function fcode)
13045 {
13046 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13047 CALL_EXPR_ARGP (exp), fcode);
13048 }
13049
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.
     Layout is (dest, len, flag, size, fmt, ...).  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is the all-ones "unknown" marker, prove that the
     written length cannot exceed the destination object.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call dropping FLAG and SIZE: keep dest, len, fmt plus
     any trailing arguments past index 5.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
13126
13127 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13128 a normal call should be emitted rather than expanding the function
13129 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13130 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13131 passed as second argument. */
13132
13133 tree
13134 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13135 enum built_in_function fcode)
13136 {
13137 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13138 CALL_EXPR_ARGP (exp), maxlen, fcode);
13139 }
13140
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     putchar/puts do not return the same value printf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: format is "%s" or contains no '%' at all — the output is
     a single known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* The va_list variants don't expose the "%s" argument.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  /* LEN must also survive a round-trip through int, since
	     STRING_CST lengths are ints.  */
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL stays NULL when the replacement decl was unavailable.  */
  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13289
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation, since
     fputc/fputs do not return the same value fprintf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with no conversion to consume it means
	 the call can't be this simple form (non-va_list only).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL stays NULL when the replacement decl was unavailable.  */
  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13388
13389 /* Initialize format string characters in the target charset. */
13390
13391 static bool
13392 init_target_chars (void)
13393 {
13394 static bool init;
13395 if (!init)
13396 {
13397 target_newline = lang_hooks.to_target_charset ('\n');
13398 target_percent = lang_hooks.to_target_charset ('%');
13399 target_c = lang_hooks.to_target_charset ('c');
13400 target_s = lang_hooks.to_target_charset ('s');
13401 if (target_newline == 0 || target_percent == 0 || target_c == 0
13402 || target_s == 0)
13403 return false;
13404
13405 target_percent_c[0] = target_percent;
13406 target_percent_c[1] = target_c;
13407 target_percent_c[2] = '\0';
13408
13409 target_percent_s[0] = target_percent;
13410 target_percent_s[1] = target_s;
13411 target_percent_s[2] = '\0';
13412
13413 target_percent_s_newline[0] = target_percent;
13414 target_percent_s_newline[1] = target_s;
13415 target_percent_s_newline[2] = target_newline;
13416 target_percent_s_newline[3] = '\0';
13417
13418 init = true;
13419 }
13420 return true;
13421 }
13422
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
13459
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  Each check below is applied to
     both the real and the imaginary part.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is the complex tree type; TREE_TYPE (type) is its
	 component (real) type.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
13506
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Domain check: ARG must be finite and, when MIN/MAX are given,
	 inside the (open or closed, per INCLUSIVE) interval.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Flags must be cleared immediately before calling FUNC so
	     do_mpfr_ckconv can test what FUNC itself raised.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13553
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     Both arguments must be non-overflowed REAL_CSTs.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Flags must be cleared immediately before calling FUNC so
	     do_mpfr_ckconv can test what FUNC itself raised.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
13598
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     All three arguments must be non-overflowed REAL_CSTs.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Flags must be cleared immediately before calling FUNC so
	     do_mpfr_ckconv can test what FUNC itself raised.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
13647
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Only fold a finite argument.  */
      if (real_isfinite (ra))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* Compute sine (MS) and cosine (MC) in one call; INEXACT is
	     nonzero if either result was rounded.  */
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Fold only when both conversions succeeded.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
	        {
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13717
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  MIN, when non-null, is the smallest value
   ARG2 may take (inclusive iff INCLUSIVE) for folding to proceed.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order must fit in a host long (FUNC takes a long), the
	 argument must be finite and satisfy the MIN bound if given.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* M doubles as source and destination.  */
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13764
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* Only fold finite operands.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* M0 receives the remainder; INTEGER_QUO the low quotient bits.  */
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13838
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* SG receives the sign of gamma(ARG).  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13903
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Only fold when both the real and imaginary parts are finite.  */
      if (real_isfinite (re) && real_isfinite (im))
        {
	  /* The precision comes from the component type of the complex
	     result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  /* MPC wants a rounding mode for each of the two components.  */
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  mpfr_clear_flags ();
	  /* M doubles as source and destination.  */
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
13948
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE set, non-finite components are allowed
	 through; otherwise all four parts must be finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  /* The precision comes from the component type of the complex
	     result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  mpfr_clear_flags ();
	  /* M0 doubles as the first source and the destination.  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
14007
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  /* Hand off to the tree-level worker; for a zero-argument call pass
     a pointer to error_mark_node so the worker sees a valid array.  */
  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
				     (nargs > 0
				      ? gimple_call_arg_ptr (stmt, 0)
				      : &error_mark_node), fcode);
}
14022
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  /* Hand off to the tree-level worker; for a zero-argument call pass
     a pointer to error_mark_node so the worker sees a valid array.  */
  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}
14040
14041 /* Builtins with folding operations that operate on "..." arguments
14042 need special handling; we need to store the arguments in a convenient
14043 data structure before attempting any folding. Fortunately there are
14044 only a few builtins that fall into this category. FNDECL is the
14045 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14046 result of the function call is ignored. */
14047
14048 static tree
14049 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14050 bool ignore ATTRIBUTE_UNUSED)
14051 {
14052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14053 tree ret = NULL_TREE;
14054
14055 switch (fcode)
14056 {
14057 case BUILT_IN_SPRINTF_CHK:
14058 case BUILT_IN_VSPRINTF_CHK:
14059 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14060 break;
14061
14062 case BUILT_IN_SNPRINTF_CHK:
14063 case BUILT_IN_VSNPRINTF_CHK:
14064 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14065
14066 default:
14067 break;
14068 }
14069 if (ret)
14070 {
14071 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14072 TREE_NO_WARNING (ret) = 1;
14073 return ret;
14074 }
14075 return NULL_TREE;
14076 }
14077
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the call
   being folded and IGNORE is true when its value is unused.  Returns
   the folded replacement expression or NULL_TREE.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins, and never calls that expand
     __builtin_va_arg_pack.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call pass a pointer to error_mark_node so
	 the folders see a valid array.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
	  /* Machine-specific builtins are folded by the target hook.  */
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  /* Fall back to the varargs folder for "..." builtins.  */
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
                  tree realret = ret;
		  /* Look through the no-warning NOP_EXPR wrapper added
		     by gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
                  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
14132
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  Also redirects the
   corresponding library functions (memcpy, memset, ...) that the
   compiler emits implicitly, so they use the new name too.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  /* Certain builtins are also emitted as implicit library calls;
     keep those libfuncs in sync with the renamed builtin.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* ffs is only a libfunc when int is narrower than a word;
	 otherwise it is expanded inline.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14178
14179 /* Return true if DECL is a builtin that expands to a constant or similarly
14180 simple code. */
14181 bool
14182 is_simple_builtin (tree decl)
14183 {
14184 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14185 switch (DECL_FUNCTION_CODE (decl))
14186 {
14187 /* Builtins that expand to constants. */
14188 case BUILT_IN_CONSTANT_P:
14189 case BUILT_IN_EXPECT:
14190 case BUILT_IN_OBJECT_SIZE:
14191 case BUILT_IN_UNREACHABLE:
14192 /* Simple register moves or loads from stack. */
14193 case BUILT_IN_ASSUME_ALIGNED:
14194 case BUILT_IN_RETURN_ADDRESS:
14195 case BUILT_IN_EXTRACT_RETURN_ADDR:
14196 case BUILT_IN_FROB_RETURN_ADDR:
14197 case BUILT_IN_RETURN:
14198 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14199 case BUILT_IN_FRAME_ADDRESS:
14200 case BUILT_IN_VA_END:
14201 case BUILT_IN_STACK_SAVE:
14202 case BUILT_IN_STACK_RESTORE:
14203 /* Exception state returns or moves registers around. */
14204 case BUILT_IN_EH_FILTER:
14205 case BUILT_IN_EH_POINTER:
14206 case BUILT_IN_EH_COPY_VALUES:
14207 return true;
14208
14209 default:
14210 return false;
14211 }
14212
14213 return false;
14214 }
14215
14216 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14217 most probably expanded inline into reasonably simple code. This is a
14218 superset of is_simple_builtin. */
14219 bool
14220 is_inexpensive_builtin (tree decl)
14221 {
14222 if (!decl)
14223 return false;
14224 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14225 return true;
14226 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14227 switch (DECL_FUNCTION_CODE (decl))
14228 {
14229 case BUILT_IN_ABS:
14230 case BUILT_IN_ALLOCA:
14231 case BUILT_IN_ALLOCA_WITH_ALIGN:
14232 case BUILT_IN_BSWAP32:
14233 case BUILT_IN_BSWAP64:
14234 case BUILT_IN_CLZ:
14235 case BUILT_IN_CLZIMAX:
14236 case BUILT_IN_CLZL:
14237 case BUILT_IN_CLZLL:
14238 case BUILT_IN_CTZ:
14239 case BUILT_IN_CTZIMAX:
14240 case BUILT_IN_CTZL:
14241 case BUILT_IN_CTZLL:
14242 case BUILT_IN_FFS:
14243 case BUILT_IN_FFSIMAX:
14244 case BUILT_IN_FFSL:
14245 case BUILT_IN_FFSLL:
14246 case BUILT_IN_IMAXABS:
14247 case BUILT_IN_FINITE:
14248 case BUILT_IN_FINITEF:
14249 case BUILT_IN_FINITEL:
14250 case BUILT_IN_FINITED32:
14251 case BUILT_IN_FINITED64:
14252 case BUILT_IN_FINITED128:
14253 case BUILT_IN_FPCLASSIFY:
14254 case BUILT_IN_ISFINITE:
14255 case BUILT_IN_ISINF_SIGN:
14256 case BUILT_IN_ISINF:
14257 case BUILT_IN_ISINFF:
14258 case BUILT_IN_ISINFL:
14259 case BUILT_IN_ISINFD32:
14260 case BUILT_IN_ISINFD64:
14261 case BUILT_IN_ISINFD128:
14262 case BUILT_IN_ISNAN:
14263 case BUILT_IN_ISNANF:
14264 case BUILT_IN_ISNANL:
14265 case BUILT_IN_ISNAND32:
14266 case BUILT_IN_ISNAND64:
14267 case BUILT_IN_ISNAND128:
14268 case BUILT_IN_ISNORMAL:
14269 case BUILT_IN_ISGREATER:
14270 case BUILT_IN_ISGREATEREQUAL:
14271 case BUILT_IN_ISLESS:
14272 case BUILT_IN_ISLESSEQUAL:
14273 case BUILT_IN_ISLESSGREATER:
14274 case BUILT_IN_ISUNORDERED:
14275 case BUILT_IN_VA_ARG_PACK:
14276 case BUILT_IN_VA_ARG_PACK_LEN:
14277 case BUILT_IN_VA_COPY:
14278 case BUILT_IN_TRAP:
14279 case BUILT_IN_SAVEREGS:
14280 case BUILT_IN_POPCOUNTL:
14281 case BUILT_IN_POPCOUNTLL:
14282 case BUILT_IN_POPCOUNTIMAX:
14283 case BUILT_IN_POPCOUNT:
14284 case BUILT_IN_PARITYL:
14285 case BUILT_IN_PARITYLL:
14286 case BUILT_IN_PARITYIMAX:
14287 case BUILT_IN_PARITY:
14288 case BUILT_IN_LABS:
14289 case BUILT_IN_LLABS:
14290 case BUILT_IN_PREFETCH:
14291 return true;
14292
14293 default:
14294 return is_simple_builtin (decl);
14295 }
14296
14297 return false;
14298 }