Convert standard builtin functions from being arrays to using a functional interface
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
53
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
64
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringified name of every BUILT_IN_* code, produced by expanding
   builtins.def with a DEF_BUILTIN that emits only the code's name.
   Indexed by enum built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* Reserved prefixes that mark a function name as a builtin.  */
  static const struct { const char *text; size_t len; } prefix[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 }
  };
  size_t k;

  for (k = 0; k < sizeof prefix / sizeof prefix[0]; k++)
    if (strncmp (name, prefix[k].text, prefix[k].len) == 0)
      return true;

  return false;
}
238
239
240 /* Return true if DECL is a function symbol representing a built-in. */
241
242 bool
243 is_builtin_fn (tree decl)
244 {
245 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
246 }
247
248
249 /* Return true if NODE should be considered for inline expansion regardless
250 of the optimization level. This means whenever a function is invoked with
251 its "internal" name, which normally contains the prefix "__builtin". */
252
253 static bool
254 called_as_built_in (tree node)
255 {
256 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
257 we want the name used to call the function, not the name it
258 will have. */
259 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
260 return is_builtin_name (name);
261 }
262
/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  Store N in *BITPOSP and return M.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.  */

unsigned int
get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    {
      if (TREE_CODE (exp) == FUNCTION_DECL)
	{
	  /* Function addresses can encode extra information besides their
	     alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	     allows the low bit to be used as a virtual bit, we know
	     that the address itself must be 2-byte aligned.  */
	  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	    align = 2 * BITS_PER_UNIT;
	  else
	    align = BITS_PER_UNIT;
	}
      else
	align = DECL_ALIGN (exp);
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* The target may guarantee extra alignment for constants.  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* An address of the form (p & MASK): the lowest set bit of
	     MASK bounds the alignment the masking can establish.  */
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      /* Fold in alignment/misalignment recorded in the SSA pointer
	 info, if any; otherwise derive it from a taken address.  */
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* Same (p & MASK) reasoning as the MEM_REF case above.  */
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  /* A scaled index contributes at most the alignment implied
	     by the lowest set bit of its step.  */
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
      /* An unscaled second index can shift the address by any amount.  */
      if (TMR_INDEX2 (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* A fully unknown offset: only byte alignment survives.  */
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  *bitposp = bitpos;
  return align;
}
432
433 /* Return the alignment in bits of EXP, an object. */
434
435 unsigned int
436 get_object_alignment (tree exp)
437 {
438 unsigned HOST_WIDE_INT bitpos = 0;
439 unsigned int align;
440
441 align = get_object_alignment_1 (exp, &bitpos);
442
443 /* align and bitpos now specify known low bits of the pointer.
444 ptr & (align - 1) == bitpos. */
445
446 if (bitpos != 0)
447 align = (bitpos & -bitpos);
448
449 return align;
450 }
451
452 /* Return the alignment in bits of EXP, a pointer valued expression.
453 The alignment returned is, by default, the alignment of the thing that
454 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
455
456 Otherwise, look at the expression to see if we can do better, i.e., if the
457 expression is actually pointing at an object whose alignment is tighter. */
458
459 unsigned int
460 get_pointer_alignment (tree exp)
461 {
462 STRIP_NOPS (exp);
463
464 if (TREE_CODE (exp) == ADDR_EXPR)
465 return get_object_alignment (TREE_OPERAND (exp, 0));
466 else if (TREE_CODE (exp) == SSA_NAME
467 && POINTER_TYPE_P (TREE_TYPE (exp)))
468 {
469 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
470 unsigned align;
471 if (!pi)
472 return BITS_PER_UNIT;
473 if (pi->misalign != 0)
474 align = (pi->misalign & -pi->misalign);
475 else
476 align = pi->align;
477 return align * BITS_PER_UNIT;
478 }
479
480 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
481 }
482
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A COND_EXPR has a known length only when both arms do and the
     lengths agree; the condition may not be dropped if it has side
     effects unless ONLY_VALUE says the result won't be emitted.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the string value, and hence the length, is e2's.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last array index, i.e. the position of the implicit
     terminating NUL that build_string appends.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;		/* Sentinel: triggers the bounds warning.  */
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
584
585 /* Return a char pointer for a C string if it is a string constant
586 or sum of string constant and integer constant. */
587
588 static const char *
589 c_getstr (tree src)
590 {
591 tree offset_node;
592
593 src = string_constant (src, &offset_node);
594 if (src == 0)
595 return 0;
596
597 if (offset_node == 0)
598 return TREE_STRING_POINTER (src);
599 else if (!host_integerp (offset_node, 1)
600 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
601 return 0;
602
603 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
604 }
605
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds up to two HOST_WIDE_INTs worth of target bits (low, high).  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;	/* Nonzero until the string's NUL terminator is seen.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit position J, accounting for
	 the target's word endianness and, on mixed-endian targets with
	 multi-word modes, the byte order within each word.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      /* Once CH becomes zero (NUL reached) it stays zero, so bytes past
	 the end of the string are zero-filled.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
638
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  /* Only integer constants whose target char fits in a HOST_WIDE_INT
     can be handled.  */
  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate the constant to the width of the target char.  */
  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the width of the host char; if the two disagree,
     the target value does not fit in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
666
667 /* Similar to save_expr, but assumes that arbitrary code is not executed
668 in between the multiple evaluations. In particular, we assume that a
669 non-addressable local variable will not be modified. */
670
671 static tree
672 builtin_save_expr (tree exp)
673 {
674 if (TREE_CODE (exp) == SSA_NAME
675 || (TREE_ADDRESSABLE (exp) == 0
676 && (TREE_CODE (exp) == PARM_DECL
677 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
678 return exp;
679
680 return save_expr (exp);
681 }
682
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  /* The target supplies the starting frame address directly.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
766
767 /* Alias set used for setjmp buffer. */
768 static alias_set_type setjmp_alias_set = -1;
769
770 /* Construct the leading half of a __builtin_setjmp call. Control will
771 return to RECEIVER_LABEL. This is also called directly by the SJLJ
772 exception handling code. */
773
774 void
775 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
776 {
777 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
778 rtx stack_save;
779 rtx mem;
780
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
783
784 buf_addr = convert_memory_address (Pmode, buf_addr);
785
786 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
787
788 /* We store the frame pointer and the address of receiver_label in
789 the buffer and use the rest of it for the stack save area, which
790 is machine-dependent. */
791
792 mem = gen_rtx_MEM (Pmode, buf_addr);
793 set_mem_alias_set (mem, setjmp_alias_set);
794 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
795
796 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
797 set_mem_alias_set (mem, setjmp_alias_set);
798
799 emit_move_insn (validize_mem (mem),
800 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
801
802 stack_save = gen_rtx_MEM (sa_mode,
803 plus_constant (buf_addr,
804 2 * GET_MODE_SIZE (Pmode)));
805 set_mem_alias_set (stack_save, setjmp_alias_set);
806 emit_stack_save (SAVE_NONLOCAL, &stack_save);
807
808 /* If there is further processing to do, do it. */
809 #ifdef HAVE_builtin_setjmp_setup
810 if (HAVE_builtin_setjmp_setup)
811 emit_insn (gen_builtin_setjmp_setup (buf_addr));
812 #endif
813
814 /* We have a nonlocal label. */
815 cfun->has_nonlocal_label = 1;
816 }
817
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer is eliminable to the hard frame
	 pointer on this target, no explicit restore is needed.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer a target-provided receiver pattern; otherwise fall back to
     the nonlocal-goto receiver pattern; otherwise nothing is needed.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
888
889 /* __builtin_longjmp is passed a pointer to an array of five words (not
890 all will be used on all machines). It operates similarly to the C
891 library function of the same name, but is more efficient. Much of
892 the code below is copied from the handling of non-local gotos. */
893
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* Lazily create the alias set shared by all setjmp/longjmp buffer
     accesses.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  /* The buffer address is dereferenced several times below; keep it in
     a register.  */
  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember where the insn stream ended before we expanded anything,
     so the backwards search below cannot run past our own code.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  /* If the target provides a builtin_longjmp pattern, let it do all the
     work.  */
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout: word 0 is the saved frame pointer, word 1 the
	 destination label, word 2 the saved stack pointer.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  /* Load the label before clobbering the frame pointer it may be
	     addressed from.  */
	  lab = copy_to_reg (lab);

	  /* Tell the optimizers that all memory and the frame pointer
	     change across this jump.  */
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Keep FP and SP live up to the jump itself.  */
	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* We must find a jump or call among the insns emitted above.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
976
977 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
978 and the address of the save area. */
979
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* The builtin takes exactly two pointer arguments: the destination
     label and the save area.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* Save-area layout: the frame pointer in word 0, the stack pointer one
     pointer-sized word later.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      /* Load the label before the frame pointer it may be addressed
	 from is clobbered below.  */
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all memory and the frame pointer
	 change across this jump.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1057
1058 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1059 (not all will be used on all machines) that was passed to __builtin_setjmp.
1060 It updates the stack pointer in that block to correspond to the current
1061 stack pointer. */
1062
1063 static void
1064 expand_builtin_update_setjmp_buf (rtx buf_addr)
1065 {
1066 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1067 rtx stack_save
1068 = gen_rtx_MEM (sa_mode,
1069 memory_address
1070 (sa_mode,
1071 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1072
1073 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1074 }
1075
1076 /* Expand a call to __builtin_prefetch. For a target that does not support
1077 data prefetch, evaluate the memory address argument in case it has side
1078 effects. */
1079
1080 static void
1081 expand_builtin_prefetch (tree exp)
1082 {
1083 tree arg0, arg1, arg2;
1084 int nargs;
1085 rtx op0, op1, op2;
1086
1087 if (!validate_arglist (exp, POINTER_TYPE, 0))
1088 return;
1089
1090 arg0 = CALL_EXPR_ARG (exp, 0);
1091
1092 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1093 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1094 locality). */
1095 nargs = call_expr_nargs (exp);
1096 if (nargs > 1)
1097 arg1 = CALL_EXPR_ARG (exp, 1);
1098 else
1099 arg1 = integer_zero_node;
1100 if (nargs > 2)
1101 arg2 = CALL_EXPR_ARG (exp, 2);
1102 else
1103 arg2 = integer_three_node;
1104
1105 /* Argument 0 is an address. */
1106 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1107
1108 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1109 if (TREE_CODE (arg1) != INTEGER_CST)
1110 {
1111 error ("second argument to %<__builtin_prefetch%> must be a constant");
1112 arg1 = integer_zero_node;
1113 }
1114 op1 = expand_normal (arg1);
1115 /* Argument 1 must be either zero or one. */
1116 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1117 {
1118 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1119 " using zero");
1120 op1 = const0_rtx;
1121 }
1122
1123 /* Argument 2 (locality) must be a compile-time constant int. */
1124 if (TREE_CODE (arg2) != INTEGER_CST)
1125 {
1126 error ("third argument to %<__builtin_prefetch%> must be a constant");
1127 arg2 = integer_zero_node;
1128 }
1129 op2 = expand_normal (arg2);
1130 /* Argument 2 must be 0, 1, 2, or 3. */
1131 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1132 {
1133 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1134 op2 = const0_rtx;
1135 }
1136
1137 #ifdef HAVE_prefetch
1138 if (HAVE_prefetch)
1139 {
1140 struct expand_operand ops[3];
1141
1142 create_address_operand (&ops[0], op0);
1143 create_integer_operand (&ops[1], INTVAL (op1));
1144 create_integer_operand (&ops[2], INTVAL (op2));
1145 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1146 return;
1147 }
1148 #endif
1149
1150 /* Don't do anything with direct references to volatile memory, but
1151 generate code to handle other side effects. */
1152 if (!MEM_P (op0) && side_effects_p (op0))
1153 emit_insn (op0);
1154 }
1155
1156 /* Get a MEM rtx for expression EXP which is the address of an operand
1157 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1158 the maximum length of the block of memory that might be accessed or
1159 NULL if unknown. */
1160
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (unstripped) expression to get the address.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* OFF records a positive constant byte offset folded out of a
     POINTER_PLUS_EXPR, applied to MEM further below.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array refs and conversions to reach the innermost
	     COMPONENT_REF; the chain of COMPONENT_REFs in EXP mirrors
	     the one recorded in MEM_EXPR.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  /* OFFSET/LENGTH stay -1 ("unknown") unless both the MEM's
	     offset and the access length are known constants.  */
	  if (MEM_OFFSET_KNOWN_P (mem))
	    offset = MEM_OFFSET (mem);

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through the COMPONENT_REF chain; stop (keeping
	     MEM_EXPR) as soon as a field provably contains the whole
	     access, otherwise strip one COMPONENT_REF per iteration.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      /* Re-express OFFSET relative to the enclosing record, or
		 give up tracking it if the field offset is not constant.  */
	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  /* Install the (possibly stripped) expression and offset.  */
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      if (offset >= 0)
		set_mem_offset (mem, offset);
	      else
		clear_mem_offset (mem);
	    }
	}
      /* String/memory builtins may alias anything and may touch more
	 than one element, so drop the alias set and the size.  */
      set_mem_alias_set (mem, 0);
      clear_mem_size (mem);
    }

  return mem;
}
1293 \f
1294 /* Built-in functions to perform an untyped call and return. */
1295
1296 #define apply_args_mode \
1297 (this_target_builtins->x_apply_args_mode)
1298 #define apply_result_mode \
1299 (this_target_builtins->x_apply_result_mode)
1300
1301 /* Return the size required for the block returned by __builtin_apply_args,
1302 and initialize apply_args_mode. */
1303
1304 static int
1305 apply_args_size (void)
1306 {
1307 static int size = -1;
1308 int align;
1309 unsigned int regno;
1310 enum machine_mode mode;
1311
1312 /* The values computed by this function never change. */
1313 if (size < 0)
1314 {
1315 /* The first value is the incoming arg-pointer. */
1316 size = GET_MODE_SIZE (Pmode);
1317
1318 /* The second value is the structure value address unless this is
1319 passed as an "invisible" first argument. */
1320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1321 size += GET_MODE_SIZE (Pmode);
1322
1323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1324 if (FUNCTION_ARG_REGNO_P (regno))
1325 {
1326 mode = targetm.calls.get_raw_arg_mode (regno);
1327
1328 gcc_assert (mode != VOIDmode);
1329
1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1331 if (size % align != 0)
1332 size = CEIL (size, align) * align;
1333 size += GET_MODE_SIZE (mode);
1334 apply_args_mode[regno] = mode;
1335 }
1336 else
1337 {
1338 apply_args_mode[regno] = VOIDmode;
1339 }
1340 }
1341 return size;
1342 }
1343
1344 /* Return the size required for the block returned by __builtin_apply,
1345 and initialize apply_result_mode. */
1346
1347 static int
1348 apply_result_size (void)
1349 {
1350 static int size = -1;
1351 int align, regno;
1352 enum machine_mode mode;
1353
1354 /* The values computed by this function never change. */
1355 if (size < 0)
1356 {
1357 size = 0;
1358
1359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1360 if (targetm.calls.function_value_regno_p (regno))
1361 {
1362 mode = targetm.calls.get_raw_result_mode (regno);
1363
1364 gcc_assert (mode != VOIDmode);
1365
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 size += GET_MODE_SIZE (mode);
1370 apply_result_mode[regno] = mode;
1371 }
1372 else
1373 apply_result_mode[regno] = VOIDmode;
1374
1375 /* Allow targets that use untyped_call and untyped_return to override
1376 the size so that machine-specific information can be stored here. */
1377 #ifdef APPLY_RESULT_SIZE
1378 size = APPLY_RESULT_SIZE;
1379 #endif
1380 }
1381 return size;
1382 }
1383
1384 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1385 /* Create a vector describing the result block RESULT. If SAVEP is true,
1386 the result block is used to save the values; otherwise it is used to
1387 restore the values. */
1388
1389 static rtx
1390 result_vector (int savep, rtx result)
1391 {
1392 int regno, size, align, nelts;
1393 enum machine_mode mode;
1394 rtx reg, mem;
1395 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1396
1397 size = nelts = 0;
1398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1399 if ((mode = apply_result_mode[regno]) != VOIDmode)
1400 {
1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1402 if (size % align != 0)
1403 size = CEIL (size, align) * align;
1404 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1405 mem = adjust_address (result, mode, size);
1406 savevec[nelts++] = (savep
1407 ? gen_rtx_SET (VOIDmode, mem, reg)
1408 : gen_rtx_SET (VOIDmode, reg, mem));
1409 size += GET_MODE_SIZE (mode);
1410 }
1411 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1412 }
1413 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1414
1415 /* Save the state required to perform an untyped call with the same
1416 arguments as were passed to the current function. */
1417
static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The layout here must match the one computed by apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1475
1476 /* __builtin_apply_args returns block of memory allocated on
1477 the stack into which is stored the arg pointer, structure
1478 value address, static chain, and all the registers that might
1479 possibly be used in performing a function call. The code is
1480 moved to the start of the function so the incoming values are
1481 saved. */
1482
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand the register-saving code into a detached sequence so it
       can be moved to the function entry.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1520
1521 /* Perform an untyped call and save the state required to perform an
1522 untyped return of whatever value was returned by the given function. */
1523
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  /* Block-copy the caller's pushed arguments into the new block.  */
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  Calling apply_args_size here makes
     sure apply_args_mode has been initialized.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn(), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1687
1688 /* Perform an untyped return. */
1689
static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Calling apply_result_size makes sure apply_result_mode has been
     initialized before we read it below.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  /* If the target provides an untyped_return pattern, let it do all
     the work.  */
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     The layout must match the one built by apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect a USE of each restored register in CALL_FUSAGE so
	   they are all emitted together before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1737
1738 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1739
1740 static enum type_class
1741 type_to_class (tree type)
1742 {
1743 switch (TREE_CODE (type))
1744 {
1745 case VOID_TYPE: return void_type_class;
1746 case INTEGER_TYPE: return integer_type_class;
1747 case ENUMERAL_TYPE: return enumeral_type_class;
1748 case BOOLEAN_TYPE: return boolean_type_class;
1749 case POINTER_TYPE: return pointer_type_class;
1750 case REFERENCE_TYPE: return reference_type_class;
1751 case OFFSET_TYPE: return offset_type_class;
1752 case REAL_TYPE: return real_type_class;
1753 case COMPLEX_TYPE: return complex_type_class;
1754 case FUNCTION_TYPE: return function_type_class;
1755 case METHOD_TYPE: return method_type_class;
1756 case RECORD_TYPE: return record_type_class;
1757 case UNION_TYPE:
1758 case QUAL_UNION_TYPE: return union_type_class;
1759 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1760 ? string_type_class : array_type_class);
1761 case LANG_TYPE: return lang_type_class;
1762 default: return no_type_class;
1763 }
1764 }
1765
1766 /* Expand a call EXP to __builtin_classify_type. */
1767
1768 static rtx
1769 expand_builtin_classify_type (tree exp)
1770 {
1771 if (call_expr_nargs (exp))
1772 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1773 return GEN_INT (no_type_class);
1774 }
1775
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  Each expansion
   supplies three switch cases and assigns the double/float/long-double
   codes to the enclosing function's FCODE, FCODEF and FCODEL locals.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1789
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT_P is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  /* Map FN to the three members of its family: CASE_MATHFN sets
     FCODE/FCODEF/FCODEL to the double/float/long-double variants.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the family member matching TYPE's main variant; other
     floating types (e.g. __float128) have no libm family here.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  /* Honor the implicit-use restriction when requested.  */
  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1908
/* Like mathfn_built_in_1(), but always restricted to the implicitly
   usable builtin declarations.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
1916
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A NaN is the only value
     that compares unequal to itself, so jump to LAB (skipping the
     errno handling) when TARGET == TARGET holds.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1959
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab and note whether a NaN result would require
     errno to be set to EDOM.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* A provably nonnegative argument cannot produce a domain
	 error, so skip the errno check for it.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* The errno check is only meaningful when errno math is in effect
     and the mode can represent NaNs at all.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.
     The inline errno check costs size, so avoid it when optimizing
     for size.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2080
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* scalbn/scalbln/ldexp take an integer exponent as their second
     argument; everything else takes two real arguments.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through to the common break.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when the radix of the mode is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The inline errno check costs size; punt when optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list, since it may be expanded
     again by the library-call fallback below.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2189
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list, since the library-call
     fallback below may expand the arguments again.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2262
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos optab; both sin and cos map to it.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one we need
	     by passing 0 for the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2366
2367 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2368 return an RTL instruction code that implements the functionality.
2369 If that isn't possible or available return CODE_FOR_nothing. */
2370
2371 static enum insn_code
2372 interclass_mathfn_icode (tree arg, tree fndecl)
2373 {
2374 bool errno_set = false;
2375 optab builtin_optab = 0;
2376 enum machine_mode mode;
2377
2378 switch (DECL_FUNCTION_CODE (fndecl))
2379 {
2380 CASE_FLT_FN (BUILT_IN_ILOGB):
2381 errno_set = true; builtin_optab = ilogb_optab; break;
2382 CASE_FLT_FN (BUILT_IN_ISINF):
2383 builtin_optab = isinf_optab; break;
2384 case BUILT_IN_ISNORMAL:
2385 case BUILT_IN_ISFINITE:
2386 CASE_FLT_FN (BUILT_IN_FINITE):
2387 case BUILT_IN_FINITED32:
2388 case BUILT_IN_FINITED64:
2389 case BUILT_IN_FINITED128:
2390 case BUILT_IN_ISINFD32:
2391 case BUILT_IN_ISINFD64:
2392 case BUILT_IN_ISINFD128:
2393 /* These builtins have no optabs (yet). */
2394 break;
2395 default:
2396 gcc_unreachable ();
2397 }
2398
2399 /* There's no easy way to detect the case we need to set EDOM. */
2400 if (flag_errno_math && errno_set)
2401 return CODE_FOR_nothing;
2402
2403 /* Optab mode depends on the mode of the input argument. */
2404 mode = TYPE_MODE (TREE_TYPE (arg));
2405
2406 if (builtin_optab)
2407 return optab_handler (builtin_optab, mode);
2408 return CODE_FOR_nothing;
2409 }
2410
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      /* Remember the unstabilized argument so it can be restored if
	 the expansion is abandoned below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* The insn failed to legitimize/emit; discard any partially
	 emitted insns and undo the argument stabilization.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2461
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the value returned is const0_rtx (sincos
   returns void; results are stored through its pointer arguments).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointer arguments so the
     stores get correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2515
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Three strategies are tried, in
   order: the sincos optab, a libcall to sincos, and a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Allocate stack slots to receive the sin and cos results and
	 pass their addresses to sincos.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i); build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(x) is the real part,
     sin(x) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2624
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2643
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the pure floating-point rounding
     function to fall back on if the optab is unavailable.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the builtin to the libm function name of the matching
	 precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2779
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;

    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2852
2853 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2854 a normal call should be emitted rather than expanding the function
2855 in-line. EXP is the expression that is a call to the builtin
2856 function; if convenient, the result should be placed in TARGET. */
2857
2858 static rtx
2859 expand_builtin_powi (tree exp, rtx target)
2860 {
2861 tree arg0, arg1;
2862 rtx op0, op1;
2863 enum machine_mode mode;
2864 enum machine_mode mode2;
2865
2866 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2867 return NULL_RTX;
2868
2869 arg0 = CALL_EXPR_ARG (exp, 0);
2870 arg1 = CALL_EXPR_ARG (exp, 1);
2871 mode = TYPE_MODE (TREE_TYPE (exp));
2872
2873 /* Emit a libcall to libgcc. */
2874
2875 /* Mode of the 2nd argument must match that of an int. */
2876 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2877
2878 if (target == NULL_RTX)
2879 target = gen_reg_rtx (mode);
2880
2881 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2882 if (GET_MODE (op0) != mode)
2883 op0 = convert_to_mode (mode, op0, 0);
2884 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2885 if (GET_MODE (op1) != mode2)
2886 op1 = convert_to_mode (mode2, op1, 0);
2887
2888 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2889 target, LCT_CONST, mode, 2,
2890 op0, mode, op1, mode2);
2891
2892 return target;
2893 }
2894
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  TARGET_MODE is the
   mode of the function's result type.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Walk
	 to wider modes until a strlen pattern is found or we run out
	 of modes.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operands: result, source memory, a constant 0, alignment.  */
      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the address computation in front of the strlen insn
	 emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
2998
2999 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3000 bytes from constant string DATA + OFFSET and return it as target
3001 constant. */
3002
3003 static rtx
3004 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3005 enum machine_mode mode)
3006 {
3007 const char *str = (const char *) data;
3008
3009 gcc_assert (offset >= 0
3010 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3011 <= strlen (str) + 1));
3012
3013 return c_readstr (str + offset, mode);
3014 }
3015
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up profile-feedback hints (expected block size and
	 alignment) for this string operation, when available.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns the destination pointer; synthesize it if the
	 block-move expander did not hand one back.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3095
3096 /* Expand a call EXP to the mempcpy builtin.
3097 Return NULL_RTX if we failed; the caller should emit a normal call,
3098 otherwise try to get the result in TARGET, if convenient (and in
3099 mode MODE if that's convenient). If ENDP is 0 return the
3100 destination pointer, if ENDP is 1 return the end pointer ala
3101 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3102 stpcpy. */
3103
3104 static rtx
3105 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3106 {
3107 if (!validate_arglist (exp,
3108 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3109 return NULL_RTX;
3110 else
3111 {
3112 tree dest = CALL_EXPR_ARG (exp, 0);
3113 tree src = CALL_EXPR_ARG (exp, 1);
3114 tree len = CALL_EXPR_ARG (exp, 2);
3115 return expand_builtin_mempcpy_args (dest, src, len,
3116 target, mode, /*endp=*/ 1);
3117 }
3118 }
3119
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  ENDP selects the returned pointer: 0 for the
   destination, 1 for the end pointer (mempcpy), 2 for end minus one
   (stpcpy).  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back to a piecewise move if the (constant)
	 length is small enough for the given alignment.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3196
3197 #ifndef HAVE_movstr
3198 # define HAVE_movstr 0
3199 # define CODE_FOR_movstr CODE_FOR_nothing
3200 #endif
3201
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* HAVE_movstr is 0 when the target provides no movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The caller wants the destination pointer back; capture it in
	 TARGET before the pattern consumes the address.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3246
3247 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3248 NULL_RTX if we failed the caller should emit a normal call, otherwise
3249 try to get the result in TARGET, if convenient (and in mode MODE if that's
3250 convenient). */
3251
3252 static rtx
3253 expand_builtin_strcpy (tree exp, rtx target)
3254 {
3255 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3256 {
3257 tree dest = CALL_EXPR_ARG (exp, 0);
3258 tree src = CALL_EXPR_ARG (exp, 1);
3259 return expand_builtin_strcpy_args (dest, src, target);
3260 }
3261 return NULL_RTX;
3262 }
3263
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* endp == 0: strcpy returns the destination pointer.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3275
3276 /* Expand a call EXP to the stpcpy builtin.
3277 Return NULL_RTX if we failed the caller should emit a normal call,
3278 otherwise try to get the result in TARGET, if convenient (and in
3279 mode MODE if that's convenient). */
3280
3281 static rtx
3282 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3283 {
3284 tree dst, src;
3285 location_t loc = EXPR_LOCATION (exp);
3286
3287 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3288 return NULL_RTX;
3289
3290 dst = CALL_EXPR_ARG (exp, 0);
3291 src = CALL_EXPR_ARG (exp, 1);
3292
3293 /* If return value is ignored, transform stpcpy into strcpy. */
3294 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3295 {
3296 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3297 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3298 return expand_expr (result, target, mode, EXPAND_NORMAL);
3299 }
3300 else
3301 {
3302 tree len, lenp1;
3303 rtx ret;
3304
3305 /* Ensure we get an actual string whose length can be evaluated at
3306 compile-time, not an expression containing a string. This is
3307 because the latter will potentially produce pessimized code
3308 when used to produce the return value. */
3309 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3310 return expand_movstr (dst, src, target, /*endp=*/2);
3311
3312 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3313 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3314 target, mode, /*endp=*/2);
3315
3316 if (ret)
3317 return ret;
3318
3319 if (TREE_CODE (len) == INTEGER_CST)
3320 {
3321 rtx len_rtx = expand_normal (len);
3322
3323 if (CONST_INT_P (len_rtx))
3324 {
3325 ret = expand_builtin_strcpy_args (dst, src, target);
3326
3327 if (ret)
3328 {
3329 if (! target)
3330 {
3331 if (mode != VOIDmode)
3332 target = gen_reg_rtx (mode);
3333 else
3334 target = gen_reg_rtx (GET_MODE (ret));
3335 }
3336 if (GET_MODE (target) != GET_MODE (ret))
3337 ret = gen_lowpart (GET_MODE (target), ret);
3338
3339 ret = plus_constant (ret, INTVAL (len_rtx));
3340 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3341 gcc_assert (ret);
3342
3343 return target;
3344 }
3345 }
3346 }
3347
3348 return expand_movstr (dst, src, target, /*endp=*/2);
3349 }
3350 }
3351
3352 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3353 bytes from constant string DATA + OFFSET and return it as target
3354 constant. */
3355
3356 rtx
3357 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3358 enum machine_mode mode)
3359 {
3360 const char *str = (const char *) data;
3361
3362 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3363 return const0_rtx;
3364
3365 return c_readstr (str + offset, mode);
3366 }
3367
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call.  Only
   handles the zero-padding case (LEN > strlen (SRC)), where the whole
   result can be emitted with store_by_pieces.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, the number of bytes the source
	 actually provides (including the NUL).  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3417
3418 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3419 bytes from constant string DATA + OFFSET and return it as target
3420 constant. */
3421
3422 rtx
3423 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3424 enum machine_mode mode)
3425 {
3426 const char *c = (const char *) data;
3427 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3428
3429 memset (p, *c, GET_MODE_SIZE (mode));
3430
3431 return c_readstr (p, mode);
3432 }
3433
3434 /* Callback routine for store_by_pieces. Return the RTL of a register
3435 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3436 char value given in the RTL register data. For example, if mode is
3437 4 bytes wide, return the RTL for 0x01010101*data. */
3438
3439 static rtx
3440 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3441 enum machine_mode mode)
3442 {
3443 rtx target, coeff;
3444 size_t size;
3445 char *p;
3446
3447 size = GET_MODE_SIZE (mode);
3448 if (size == 1)
3449 return (rtx) data;
3450
3451 p = XALLOCAVEC (char, size);
3452 memset (p, 1, size);
3453 coeff = c_readstr (p, mode);
3454
3455 target = convert_to_mode (mode, (rtx) data, 1);
3456 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3457 return force_reg (mode, target);
3458 }
3459
3460 /* Expand expression EXP, which is a call to the memset builtin. Return
3461 NULL_RTX if we failed the caller should emit a normal call, otherwise
3462 try to get the result in TARGET, if convenient (and in mode MODE if that's
3463 convenient). */
3464
3465 static rtx
3466 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3467 {
3468 if (!validate_arglist (exp,
3469 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3470 return NULL_RTX;
3471 else
3472 {
3473 tree dest = CALL_EXPR_ARG (exp, 0);
3474 tree val = CALL_EXPR_ARG (exp, 1);
3475 tree len = CALL_EXPR_ARG (exp, 2);
3476 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3477 }
3478 }
3479
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used to build
   the fallback library call (memset or bzero).  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-feedback hints about the expected block size and
     alignment, when available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: try store_by_pieces with a generated
     replicated register, else a setmem pattern, else a libcall.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value; C receives it truncated to a target char.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Rebuild a call to whichever builtin (memset or bzero) we were
     originally expanding, using the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3611
3612 /* Expand expression EXP, which is a call to the bzero builtin. Return
3613 NULL_RTX if we failed the caller should emit a normal call. */
3614
3615 static rtx
3616 expand_builtin_bzero (tree exp)
3617 {
3618 tree dest, size;
3619 location_t loc = EXPR_LOCATION (exp);
3620
3621 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3622 return NULL_RTX;
3623
3624 dest = CALL_EXPR_ARG (exp, 0);
3625 size = CALL_EXPR_ARG (exp, 1);
3626
3627 /* New argument list transforming bzero(ptr x, int y) to
3628 memset(ptr x, int 0, size_t y). This is done this way
3629 so that if it isn't expanded inline, we fallback to
3630 calling bzero instead of memset. */
3631
3632 return expand_builtin_memset_args (dest, integer_zero_node,
3633 fold_convert_loc (loc,
3634 size_type_node, size),
3635 const0_rtx, VOIDmode, exp);
3636 }
3637
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  Only expands inline when the target
   provides a cmpmemsi pattern.  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only when it is already a pseudo of the right mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* If the pattern declined, fall back to a memcmp libcall.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3729
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.

   Expansion first tries the cmpstrsi insn pattern (compare up to the
   terminating NUL), then falls back to cmpstrnsi with a length bound
   derived from any compile-time-known string length.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  The
	 saved expressions are reused for the library-call fallback so
	 the arguments are not evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating NUL in the compared lengths.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3870
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.

   Uses the cmpstrnsi insn pattern with length MIN (strlen (s) + 1, n)
   when at least one string length is known at compile time.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in the compared lengths.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  The saved
	 expressions are reused for the library-call fallback so the
	 arguments are not evaluated twice.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3991
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The register-save insns are emitted at the
   start of the function, and the resulting value is cached in
   saveregs_value so repeated calls expand only once.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4028
4029 /* Expand a call to __builtin_next_arg. */
4030
4031 static rtx
4032 expand_builtin_next_arg (void)
4033 {
4034 /* Checking arguments is already done in fold_builtin_next_arg
4035 that must be called before this function. */
4036 return expand_binop (ptr_mode, add_optab,
4037 crtl->args.internal_arg_pointer,
4038 crtl->args.arg_offset_rtx,
4039 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4040 }
4041
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any new
   trees.  If NEEDS_LVALUE, the result must be usable as an lvalue.
   Array-type va_lists are decayed to a pointer; scalar va_lists are
   wrapped in a MEM_REF through a stabilized address.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  /* Mark the ADDR_EXPR volatile-ish so it isn't folded away
	     before the side effects of VALIST are evaluated.  */
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4091
/* The "standard" definition of va_list is void*.  Default for the
   TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4099
/* The "standard" abi va_list is va_list_type_node, regardless of the
   function FNDECL.  Default for the TARGET_FN_ABI_VA_LIST hook.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4107
4108 /* The "standard" type of va_list is va_list_type_node. */
4109
4110 tree
4111 std_canonical_va_list_type (tree type)
4112 {
4113 tree wtype, htype;
4114
4115 if (INDIRECT_REF_P (type))
4116 type = TREE_TYPE (type);
4117 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4118 type = TREE_TYPE (type);
4119 wtype = va_list_type_node;
4120 htype = type;
4121 /* Treat structure va_list types. */
4122 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4123 htype = TREE_TYPE (htype);
4124 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4125 {
4126 /* If va_list is an array type, the argument may have decayed
4127 to a pointer type, e.g. by being passed to another function.
4128 In that case, unwrap both types so that we can compare the
4129 underlying records. */
4130 if (TREE_CODE (htype) == ARRAY_TYPE
4131 || POINTER_TYPE_P (htype))
4132 {
4133 wtype = TREE_TYPE (wtype);
4134 htype = TREE_TYPE (htype);
4135 }
4136 }
4137 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4138 return va_list_type_node;
4139
4140 return NULL_TREE;
4141 }
4142
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is expanded as a write destination and NEXTARG
   is stored into it, converting mode if necessary.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4152
/* Expand EXP, a call to __builtin_va_start.  Diagnoses a missing
   second argument, then dispatches to the target hook (or the standard
   implementation) with a stabilized va_list lvalue.  Always returns
   const0_rtx, as va_start yields no value.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse of the second argument;
     if it reports an error there is nothing further to expand.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4181
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the requested argument type; new
   statements are appended to PRE_P/POST_P.  Returns a tree that
   dereferences the computed argument address.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference: the va slot holds a pointer to the
     actual value, so operate on the pointer type and dereference at
     the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4278
4279 /* Build an indirect-ref expression over the given TREE, which represents a
4280 piece of a va_arg() expansion. */
4281 tree
4282 build_va_arg_indirect_ref (tree addr)
4283 {
4284 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4285
4286 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4287 mf_mark (addr);
4288
4289 return addr;
4290 }
4291
4292 /* Return a dummy expression of type TYPE in order to keep going after an
4293 error. */
4294
4295 static tree
4296 dummy_object (tree type)
4297 {
4298 tree t = build_int_cst (build_pointer_type (type), 0);
4299 return build2 (MEM_REF, type, t, t);
4300 }
4301
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is
   the VA_ARG_EXPR; new statements go to PRE_P/POST_P.  Returns the
   usual gimplify status code.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4394
/* Expand EXP, a call to __builtin_va_end.  va_end is a no-op for the
   standard ABI; we only evaluate the va_list operand for its side
   effects.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4409
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Scalar va_lists are copied
   with a simple assignment; array va_lists need a block move of the
   whole structure.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination needs to be an lvalue; the source does not.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
      			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4461
4462 /* Expand a call to one of the builtin functions __builtin_frame_address or
4463 __builtin_return_address. */
4464
4465 static rtx
4466 expand_builtin_frame_address (tree fndecl, tree exp)
4467 {
4468 /* The argument must be a nonnegative integer constant.
4469 It counts the number of frames to scan up the stack.
4470 The value is the return address saved in that frame. */
4471 if (call_expr_nargs (exp) == 0)
4472 /* Warning about missing arg was already issued. */
4473 return const0_rtx;
4474 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4475 {
4476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4477 error ("invalid argument to %<__builtin_frame_address%>");
4478 else
4479 error ("invalid argument to %<__builtin_return_address%>");
4480 return const0_rtx;
4481 }
4482 else
4483 {
4484 rtx tem
4485 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4486 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4487
4488 /* Some ports cannot access arbitrary stack frames. */
4489 if (tem == NULL)
4490 {
4491 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4492 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4493 else
4494 warning (0, "unsupported argument to %<__builtin_return_address%>");
4495 return const0_rtx;
4496 }
4497
4498 /* For __builtin_frame_address, return what we've got. */
4499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4500 return tem;
4501
4502 if (!REG_P (tem)
4503 && ! CONSTANT_P (tem))
4504 tem = copy_to_mode_reg (Pmode, tem);
4505 return tem;
4506 }
4507 }
4508
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  Also handles
   __builtin_alloca_with_align, whose second argument supplies the
   requested alignment in bits.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  /* Emit normal call if marked not-inlineable.  */
  if (CALL_CANNOT_INLINE_P (exp))
    return NULL_RTX;

  /* alloca_with_align takes (size, align); plain alloca takes (size).  */
  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  The result is in the stack's address
     space; convert it to ptr_mode for the caller.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
4549
/* Expand EXP, a call to a bswap builtin.  The mode to expand with is
   taken from the type of the argument.  Try to place the result in
   TARGET; SUBTARGET may be used for computing the operand.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  /* bswap is expected to always be expandable, hence the assert
     rather than a NULL_RTX fallback.  */
  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
4573
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB is the optab implementing the operation; the result is
   converted to TARGET_MODE.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  SUBTARGET is only usable when its mode
     matches the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The last argument
     asks for the unsigned variant except for clrsb, which is defined
     on the signed interpretation of its operand.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
4602
4603 /* Expand a call to __builtin_expect. We just return our argument
4604 as the builtin_expect semantic should've been already executed by
4605 tree branch prediction pass. */
4606
4607 static rtx
4608 expand_builtin_expect (tree exp, rtx target)
4609 {
4610 tree arg;
4611
4612 if (call_expr_nargs (exp) < 2)
4613 return const0_rtx;
4614 arg = CALL_EXPR_ARG (exp, 0);
4615
4616 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4617 /* When guessing was done, the hints should be already stripped away. */
4618 gcc_assert (!flag_guess_branch_prob
4619 || optimize == 0 || seen_error ());
4620 return target;
4621 }
4622
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  The remaining arguments (alignment and optional
   misalignment) must be side-effect free by this point.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  /* Earlier passes are expected to have stripped any side effects from
     the alignment operands; a violation here would silently drop them.  */
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
4639
/* Expand __builtin_trap: use the target's trap insn if it has one,
   otherwise fall back to a call to abort.  A barrier is emitted since
   control never continues past the trap.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4651
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4662
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Write the stabilized argument back into the CALL_EXPR so it is not
     re-evaluated if expansion falls back to a library call.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4685
4686 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4687 Return NULL is a normal call should be emitted rather than expanding the
4688 function inline. If convenient, the result should be placed in TARGET.
4689 SUBTARGET may be used as the target for computing the operand. */
4690
4691 static rtx
4692 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4693 {
4694 rtx op0, op1;
4695 tree arg;
4696
4697 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4698 return NULL_RTX;
4699
4700 arg = CALL_EXPR_ARG (exp, 0);
4701 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4702
4703 arg = CALL_EXPR_ARG (exp, 1);
4704 op1 = expand_normal (arg);
4705
4706 return expand_copysign (op0, op1, target);
4707 }
4708
4709 /* Create a new constant string literal and return a char* pointer to it.
4710 The STRING_CST value is the LEN characters at STR. */
4711 tree
4712 build_string_literal (int len, const char *str)
4713 {
4714 tree t, elem, index, type;
4715
4716 t = build_string (len, str);
4717 elem = build_type_variant (char_type_node, 1, 0);
4718 index = build_index_type (size_int (len - 1));
4719 type = build_array_type (elem, index);
4720 TREE_TYPE (t) = type;
4721 TREE_CONSTANT (t) = 1;
4722 TREE_READONLY (t) = 1;
4723 TREE_STATIC (t) = 1;
4724
4725 type = build_pointer_type (elem);
4726 t = build1 (ADDR_EXPR, type,
4727 build4 (ARRAY_REF, elem,
4728 t, integer_zero_node, NULL_TREE, NULL_TREE));
4729 return t;
4730 }
4731
/* Expand a call to __builtin___clear_cache.  Uses the target's
   clear_cache insn when available; otherwise either defers to the
   libgcc __clear_cache function (when CLEAR_INSN_CACHE is defined) or
   expands to nothing at all.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4780
4781 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.
Returns TRAMP unchanged when no extra alignment is needed, otherwise a
new pseudo holding the rounded-up address. */
4782
4783 static rtx
4784 round_trampoline_addr (rtx tramp)
4785 {
4786 rtx temp, addend, mask;
4787
4788 /* If we don't need too much alignment, we'll have been guaranteed
4789 proper alignment by get_trampoline_type. */
4790 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4791 return tramp;
4792
4793 /* Round address up to desired boundary: compute
(tramp + align - 1) & -align using the classic add-and-mask idiom. */
4794 temp = gen_reg_rtx (Pmode);
4795 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4796 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4797
4798 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4799 temp, 0, OPTAB_LIB_WIDEN);
4800 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4801 temp, 0, OPTAB_LIB_WIDEN);
4802
4803 return tramp;
4804 }
4805
/* Expand a call to __builtin_init_trampoline: initialize the trampoline
at argument 0 so that calling it invokes the nested function at
argument 1 with static chain argument 2. Returns const0_rtx, or
NULL_RTX if the argument list is malformed. */
4806 static rtx
4807 expand_builtin_init_trampoline (tree exp)
4808 {
4809 tree t_tramp, t_func, t_chain;
4810 rtx m_tramp, r_tramp, r_chain, tmp;
4811
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4813 POINTER_TYPE, VOID_TYPE))
4814 return NULL_RTX;
4815
4816 t_tramp = CALL_EXPR_ARG (exp, 0);
4817 t_func = CALL_EXPR_ARG (exp, 1);
4818 t_chain = CALL_EXPR_ARG (exp, 2);
4819
4820 r_tramp = expand_normal (t_tramp);
4821 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4822 MEM_NOTRAP_P (m_tramp) = 1;
4823
4824 /* The TRAMP argument should be the address of a field within the
4825 local function's FRAME decl. Let's see if we can fill in the
4826 MEM_ATTRs for this memory. */
4827 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4828 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4829 true, 0);
4830
/* If extra alignment was required, switch to the rounded address and
record the stronger alignment/size on the MEM. */
4831 tmp = round_trampoline_addr (r_tramp);
4832 if (tmp != r_tramp)
4833 {
4834 m_tramp = change_address (m_tramp, BLKmode, tmp);
4835 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4836 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4837 }
4838
4839 /* The FUNC argument should be the address of the nested function.
4840 Extract the actual function decl to pass to the hook. */
4841 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4842 t_func = TREE_OPERAND (t_func, 0);
4843 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4844
4845 r_chain = expand_normal (t_chain);
4846
4847 /* Generate insns to initialize the trampoline. */
4848 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4849
4850 trampolines_created = 1;
4851
/* Warn under -Wtrampolines: executable-stack trampolines are a
portability/security concern on some systems. */
4852 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4853 "trampoline generated for nested function %qD", t_func);
4854
4855 return const0_rtx;
4856 }
4857
/* Expand a call to __builtin_adjust_trampoline: convert a trampoline
address into the form actually used to call it, applying alignment
rounding and any target-specific adjustment. Returns the adjusted
address, or NULL_RTX if the argument list is malformed. */
4858 static rtx
4859 expand_builtin_adjust_trampoline (tree exp)
4860 {
4861 rtx tramp;
4862
4863 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4864 return NULL_RTX;
4865
4866 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4867 tramp = round_trampoline_addr (tramp);
/* The hook is optional; only some targets need to adjust further. */
4868 if (targetm.calls.trampoline_adjust_address)
4869 tramp = targetm.calls.trampoline_adjust_address (tramp);
4870
4871 return tramp;
4872 }
4873
4874 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4875 function. The function first checks whether the back end provides
4876 an insn to implement signbit for the respective mode. If not, it
4877 checks whether the floating point format of the value is such that
4878 the sign bit can be extracted. If that is not the case, the
4879 function returns NULL_RTX to indicate that a normal call should be
4880 emitted rather than expanding the function in-line. EXP is the
4881 expression that is a call to the builtin function; if convenient,
4882 the result should be placed in TARGET. */
4883 static rtx
4884 expand_builtin_signbit (tree exp, rtx target)
4885 {
4886 const struct real_format *fmt;
4887 enum machine_mode fmode, imode, rmode;
4888 tree arg;
4889 int word, bitpos;
4890 enum insn_code icode;
4891 rtx temp;
4892 location_t loc = EXPR_LOCATION (exp);
4893
4894 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4895 return NULL_RTX;
4896
/* FMODE is the mode of the argument, RMODE the (integer) result mode. */
4897 arg = CALL_EXPR_ARG (exp, 0);
4898 fmode = TYPE_MODE (TREE_TYPE (arg));
4899 rmode = TYPE_MODE (TREE_TYPE (exp));
4900 fmt = REAL_MODE_FORMAT (fmode);
4901
/* ARG may be used again below (in the ARG < 0.0 fallback), so make it
safe to evaluate twice. */
4902 arg = builtin_save_expr (arg);
4903
4904 /* Expand the argument yielding a RTX expression. */
4905 temp = expand_normal (arg);
4906
4907 /* Check if the back end provides an insn that handles signbit for the
4908 argument's mode. */
4909 icode = optab_handler (signbit_optab, fmode);
4910 if (icode != CODE_FOR_nothing)
4911 {
/* Try the insn; if its predicates reject the operands, delete the
partially emitted insns and fall through to the generic code. */
4912 rtx last = get_last_insn ();
4913 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4914 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4915 return target;
4916 delete_insns_since (last);
4917 }
4918
4919 /* For floating point formats without a sign bit, implement signbit
4920 as "ARG < 0.0". */
4921 bitpos = fmt->signbit_ro;
4922 if (bitpos < 0)
4923 {
4924 /* But we can't do this if the format supports signed zero. */
4925 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4926 return NULL_RTX;
4927
4928 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4929 build_real (TREE_TYPE (arg), dconst0));
4930 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4931 }
4932
/* Reinterpret the FP value as an integer so the sign bit can be
tested with shift/mask operations. */
4933 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4934 {
4935 imode = int_mode_for_mode (fmode);
4936 if (imode == BLKmode)
4937 return NULL_RTX;
4938 temp = gen_lowpart (imode, temp);
4939 }
4940 else
4941 {
/* Multi-word float: isolate the word containing the sign bit and
reduce BITPOS to an offset within that word. */
4942 imode = word_mode;
4943 /* Handle targets with different FP word orders. */
4944 if (FLOAT_WORDS_BIG_ENDIAN)
4945 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4946 else
4947 word = bitpos / BITS_PER_WORD;
4948 temp = operand_subword_force (temp, word, fmode);
4949 bitpos = bitpos % BITS_PER_WORD;
4950 }
4951
4952 /* Force the intermediate word_mode (or narrower) result into a
4953 register. This avoids attempting to create paradoxical SUBREGs
4954 of floating point modes below. */
4955 temp = force_reg (imode, temp);
4956
4957 /* If the bitpos is within the "result mode" lowpart, the operation
4958 can be implement with a single bitwise AND. Otherwise, we need
4959 a right shift and an AND. */
4960
4961 if (bitpos < GET_MODE_BITSIZE (rmode))
4962 {
4963 double_int mask = double_int_setbit (double_int_zero, bitpos);
4964
4965 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4966 temp = gen_lowpart (rmode, temp);
4967 temp = expand_binop (rmode, and_optab, temp,
4968 immed_double_int_const (mask, rmode),
4969 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4970 }
4971 else
4972 {
4973 /* Perform a logical right shift to place the signbit in the least
4974 significant bit, then truncate the result to the desired mode
4975 and mask just this bit. */
4976 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4977 temp = gen_lowpart (rmode, temp);
4978 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4979 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4980 }
4981
4982 return temp;
4983 }
4984
4985 /* Expand fork or exec calls. TARGET is the desired target of the
4986 call. EXP is the call. FN is the
4987 identificator of the actual function. IGNORE is nonzero if the
4988 value is to be ignored. Returns NULL_RTX when no profiling wrapper
is needed, so the normal call is emitted instead. */
4989
4990 static rtx
4991 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4992 {
4993 tree id, decl;
4994 tree call;
4995
4996 /* If we are not profiling, just call the function. */
4997 if (!profile_arc_flag)
4998 return NULL_RTX;
4999
5000 /* Otherwise call the wrapper. This should be equivalent for the rest of
5001 compiler, so the code does not diverge, and the wrapper may run the
5002 code necessary for keeping the profiling sane. */
5003
/* Map the builtin to its libgcov wrapper name. */
5004 switch (DECL_FUNCTION_CODE (fn))
5005 {
5006 case BUILT_IN_FORK:
5007 id = get_identifier ("__gcov_fork");
5008 break;
5009
5010 case BUILT_IN_EXECL:
5011 id = get_identifier ("__gcov_execl");
5012 break;
5013
5014 case BUILT_IN_EXECV:
5015 id = get_identifier ("__gcov_execv");
5016 break;
5017
5018 case BUILT_IN_EXECLP:
5019 id = get_identifier ("__gcov_execlp");
5020 break;
5021
5022 case BUILT_IN_EXECLE:
5023 id = get_identifier ("__gcov_execle");
5024 break;
5025
5026 case BUILT_IN_EXECVP:
5027 id = get_identifier ("__gcov_execvp");
5028 break;
5029
5030 case BUILT_IN_EXECVE:
5031 id = get_identifier ("__gcov_execve");
5032 break;
5033
5034 default:
5035 gcc_unreachable ();
5036 }
5037
/* Build an external FUNCTION_DECL for the wrapper with the same type
as the original function so the rewritten call type-checks. */
5038 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5039 FUNCTION_DECL, id, TREE_TYPE (fn));
5040 DECL_EXTERNAL (decl) = 1;
5041 TREE_PUBLIC (decl) = 1;
5042 DECL_ARTIFICIAL (decl) = 1;
5043 TREE_NOTHROW (decl) = 1;
5044 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5045 DECL_VISIBILITY_SPECIFIED (decl) = 1;
/* Re-target the original call at the wrapper, keeping all arguments. */
5046 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5047 return expand_call (call, target, ignore);
5048 }
5049
5050
5051 \f
5052 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5053 the pointer in these functions is void*, the tree optimizers may remove
5054 casts. The mode computed in expand_builtin isn't reliable either, due
5055 to __sync_bool_compare_and_swap.
5056
5057 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5058 group of builtins. This gives us log2 of the mode size. */
5059
5060 static inline enum machine_mode
5061 get_builtin_sync_mode (int fcode_diff)
5062 {
5063 /* The size is not negotiable, so ask not to get BLKmode in return
5064 if the target indicates that a smaller size would be better. */
/* E.g. fcode_diff of 0,1,2,3,4 yields 1-, 2-, 4-, 8-, 16-byte
integer modes respectively. */
5065 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5066 }
5067
5068 /* Expand the memory expression LOC and return the appropriate memory operand
5069 for the builtin_sync operations. The returned MEM is volatile, carries
the special memory-barrier alias set, and is aligned at least to MODE. */
5070
5071 static rtx
5072 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5073 {
5074 rtx addr, mem;
5075
5076 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5077 addr = convert_memory_address (Pmode, addr);
5078
5079 /* Note that we explicitly do not want any alias information for this
5080 memory, so that we kill all other live memories. Otherwise we don't
5081 satisfy the full barrier semantics of the intrinsic. */
5082 mem = validize_mem (gen_rtx_MEM (mode, addr));
5083
5084 /* The alignment needs to be at least according to that of the mode. */
5085 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5086 get_pointer_alignment (loc)));
5087 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
/* Volatile so the access is never deleted, merged or reordered. */
5088 MEM_VOLATILE_P (mem) = 1;
5089
5090 return mem;
5091 }
5092
5093 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5094 EXP is the CALL_EXPR. CODE is the rtx code
5095 that corresponds to the arithmetic or logical operation from the name;
5096 an exception here is that NOT actually means NAND. TARGET is an optional
5097 place for us to store the results; AFTER is true if this is the
5098 fetch_and_xxx form. IGNORE is true if we don't actually care about
5099 the result of the operation at all. */
5100
5101 static rtx
5102 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5103 enum rtx_code code, bool after,
5104 rtx target, bool ignore)
5105 {
5106 rtx val, mem;
5107 enum machine_mode old_mode;
5108 location_t loc = EXPR_LOCATION (exp);
5109
/* -Wsync-nand: the NAND builtins changed meaning in GCC 4.4; warn once
per translation unit for each of the two builtin families. */
5110 if (code == NOT && warn_sync_nand)
5111 {
5112 tree fndecl = get_callee_fndecl (exp);
5113 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5114
/* One-shot flags so each diagnostic is emitted at most once. */
5115 static bool warned_f_a_n, warned_n_a_f;
5116
5117 switch (fcode)
5118 {
5119 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5120 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5121 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5122 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5123 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5124
5125 if (warned_f_a_n)
5126 break;
5127
5128 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5129 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5130 warned_f_a_n = true;
5131 break;
5132
5133 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5134 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5135 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5136 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5137 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5138
5139 if (warned_n_a_f)
5140 break;
5141
5142 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5143 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5144 warned_n_a_f = true;
5145 break;
5146
5147 default:
5148 gcc_unreachable ();
5149 }
5150 }
5151
5152 /* Expand the operands. */
5153 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5154
5155 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5156 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5157 of CONST_INTs, where we know the old_mode only from the call argument. */
5158 old_mode = GET_MODE (val);
5159 if (old_mode == VOIDmode)
5160 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5161 val = convert_modes (mode, old_mode, val, 1);
5162
/* When the result is unused, emit the cheaper form with no output. */
5163 if (ignore)
5164 return expand_sync_operation (mem, val, code);
5165 else
5166 return expand_sync_fetch_operation (mem, val, code, after, target);
5167 }
5168
5169 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5170 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5171 true if this is the boolean form. TARGET is a place for us to store the
5172 results; this is NOT optional if IS_BOOL is true. */
5173
5174 static rtx
5175 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5176 bool is_bool, rtx target)
5177 {
5178 rtx old_val, new_val, mem;
5179 enum machine_mode old_mode;
5180
5181 /* Expand the operands. */
5182 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5183
5184
/* Argument 1: the expected (comparison) value. */
5185 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5186 mode, EXPAND_NORMAL);
5187 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5188 of CONST_INTs, where we know the old_mode only from the call argument. */
5189 old_mode = GET_MODE (old_val);
5190 if (old_mode == VOIDmode)
5191 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5192 old_val = convert_modes (mode, old_mode, old_val, 1);
5193
/* Argument 2: the replacement value. */
5194 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5195 mode, EXPAND_NORMAL);
5196 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5197 of CONST_INTs, where we know the old_mode only from the call argument. */
5198 old_mode = GET_MODE (new_val);
5199 if (old_mode == VOIDmode)
5200 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5201 new_val = convert_modes (mode, old_mode, new_val, 1);
5202
5203 if (is_bool)
5204 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5205 else
5206 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5207 }
5208
5209 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5210 general form is actually an atomic exchange, and some targets only
5211 support a reduced form with the second argument being a constant 1.
5212 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5213 the results. */
5214
5215 static rtx
5216 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5217 rtx target)
5218 {
5219 rtx val, mem;
5220 enum machine_mode old_mode;
5221
5222 /* Expand the operands. */
5223 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5224 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5225 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5226 of CONST_INTs, where we know the old_mode only from the call argument. */
5227 old_mode = GET_MODE (val);
5228 if (old_mode == VOIDmode)
5229 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5230 val = convert_modes (mode, old_mode, val, 1);
5231
5232 return expand_sync_lock_test_and_set (mem, val, target);
5233 }
5234
5235 /* Expand the __sync_synchronize intrinsic: emit a full memory barrier.
Preference order: target memory_barrier insn, then the target's
synchronize libfunc, then a volatile asm with a "memory" clobber. */
5236
5237 static void
5238 expand_builtin_sync_synchronize (void)
5239 {
5240 gimple x;
5241 VEC (tree, gc) *v_clobbers;
5242
5243 #ifdef HAVE_memory_barrier
5244 if (HAVE_memory_barrier)
5245 {
5246 emit_insn (gen_memory_barrier ());
5247 return;
5248 }
5249 #endif
5250
5251 if (synchronize_libfunc != NULL_RTX)
5252 {
5253 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5254 return;
5255 }
5256
5257 /* If no explicit memory barrier instruction is available, create an
5258 empty asm stmt with a memory clobber. */
5259 v_clobbers = VEC_alloc (tree, gc, 1);
5260 VEC_quick_push (tree, v_clobbers,
5261 tree_cons (NULL, build_string (6, "memory"), NULL));
5262 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
/* Volatile so the asm itself cannot be removed or moved. */
5263 gimple_asm_set_volatile (x, true);
5264 expand_asm_stmt (x);
5265 }
5266
5267 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR.
Uses the target's sync_lock_release insn when available; otherwise
falls back to a full barrier followed by a store of zero. */
5268
5269 static void
5270 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5271 {
5272 struct expand_operand ops[2];
5273 enum insn_code icode;
5274 rtx mem;
5275
5276 /* Expand the operands. */
5277 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5278
5279 /* If there is an explicit operation in the md file, use it. */
5280 icode = direct_optab_handler (sync_lock_release_optab, mode);
5281 if (icode != CODE_FOR_nothing)
5282 {
5283 create_fixed_operand (&ops[0], mem);
5284 create_input_operand (&ops[1], const0_rtx, mode);
5285 if (maybe_expand_insn (icode, 2, ops))
5286 return;
5287 }
5288
5289 /* Otherwise we can implement this operation by emitting a barrier
5290 followed by a store of zero. */
5291 expand_builtin_sync_synchronize ();
5292 emit_move_insn (mem, const0_rtx);
5293 }
5294 \f
5295 /* Expand an expression EXP that calls a built-in function,
5296 with result going to TARGET if that's convenient
5297 (and in mode MODE if that's convenient).
5298 SUBTARGET may be used as the target for computing one of EXP's operands.
5299 IGNORE is nonzero if the value is to be ignored. */
5300
5301 rtx
5302 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5303 int ignore)
5304 {
5305 tree fndecl = get_callee_fndecl (exp);
5306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5307 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5308 int flags;
5309
5310 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5311 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5312
5313 /* When not optimizing, generate calls to library functions for a certain
5314 set of builtins. */
5315 if (!optimize
5316 && !called_as_built_in (fndecl)
5317 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5318 && fcode != BUILT_IN_ALLOCA
5319 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5320 && fcode != BUILT_IN_FREE)
5321 return expand_call (exp, target, ignore);
5322
5323 /* The built-in function expanders test for target == const0_rtx
5324 to determine whether the function's result will be ignored. */
5325 if (ignore)
5326 target = const0_rtx;
5327
5328 /* If the result of a pure or const built-in function is ignored, and
5329 none of its arguments are volatile, we can avoid expanding the
5330 built-in call and just evaluate the arguments for side-effects. */
5331 if (target == const0_rtx
5332 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5333 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5334 {
5335 bool volatilep = false;
5336 tree arg;
5337 call_expr_arg_iterator iter;
5338
5339 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5340 if (TREE_THIS_VOLATILE (arg))
5341 {
5342 volatilep = true;
5343 break;
5344 }
5345
5346 if (! volatilep)
5347 {
5348 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5349 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5350 return const0_rtx;
5351 }
5352 }
5353
5354 switch (fcode)
5355 {
5356 CASE_FLT_FN (BUILT_IN_FABS):
5357 target = expand_builtin_fabs (exp, target, subtarget);
5358 if (target)
5359 return target;
5360 break;
5361
5362 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5363 target = expand_builtin_copysign (exp, target, subtarget);
5364 if (target)
5365 return target;
5366 break;
5367
5368 /* Just do a normal library call if we were unable to fold
5369 the values. */
5370 CASE_FLT_FN (BUILT_IN_CABS):
5371 break;
5372
5373 CASE_FLT_FN (BUILT_IN_EXP):
5374 CASE_FLT_FN (BUILT_IN_EXP10):
5375 CASE_FLT_FN (BUILT_IN_POW10):
5376 CASE_FLT_FN (BUILT_IN_EXP2):
5377 CASE_FLT_FN (BUILT_IN_EXPM1):
5378 CASE_FLT_FN (BUILT_IN_LOGB):
5379 CASE_FLT_FN (BUILT_IN_LOG):
5380 CASE_FLT_FN (BUILT_IN_LOG10):
5381 CASE_FLT_FN (BUILT_IN_LOG2):
5382 CASE_FLT_FN (BUILT_IN_LOG1P):
5383 CASE_FLT_FN (BUILT_IN_TAN):
5384 CASE_FLT_FN (BUILT_IN_ASIN):
5385 CASE_FLT_FN (BUILT_IN_ACOS):
5386 CASE_FLT_FN (BUILT_IN_ATAN):
5387 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5388 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5389 because of possible accuracy problems. */
5390 if (! flag_unsafe_math_optimizations)
5391 break;
5392 CASE_FLT_FN (BUILT_IN_SQRT):
5393 CASE_FLT_FN (BUILT_IN_FLOOR):
5394 CASE_FLT_FN (BUILT_IN_CEIL):
5395 CASE_FLT_FN (BUILT_IN_TRUNC):
5396 CASE_FLT_FN (BUILT_IN_ROUND):
5397 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5398 CASE_FLT_FN (BUILT_IN_RINT):
5399 target = expand_builtin_mathfn (exp, target, subtarget);
5400 if (target)
5401 return target;
5402 break;
5403
5404 CASE_FLT_FN (BUILT_IN_FMA):
5405 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5406 if (target)
5407 return target;
5408 break;
5409
5410 CASE_FLT_FN (BUILT_IN_ILOGB):
5411 if (! flag_unsafe_math_optimizations)
5412 break;
5413 CASE_FLT_FN (BUILT_IN_ISINF):
5414 CASE_FLT_FN (BUILT_IN_FINITE):
5415 case BUILT_IN_ISFINITE:
5416 case BUILT_IN_ISNORMAL:
5417 target = expand_builtin_interclass_mathfn (exp, target);
5418 if (target)
5419 return target;
5420 break;
5421
5422 CASE_FLT_FN (BUILT_IN_ICEIL):
5423 CASE_FLT_FN (BUILT_IN_LCEIL):
5424 CASE_FLT_FN (BUILT_IN_LLCEIL):
5425 CASE_FLT_FN (BUILT_IN_LFLOOR):
5426 CASE_FLT_FN (BUILT_IN_IFLOOR):
5427 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5428 target = expand_builtin_int_roundingfn (exp, target);
5429 if (target)
5430 return target;
5431 break;
5432
5433 CASE_FLT_FN (BUILT_IN_IRINT):
5434 CASE_FLT_FN (BUILT_IN_LRINT):
5435 CASE_FLT_FN (BUILT_IN_LLRINT):
5436 CASE_FLT_FN (BUILT_IN_IROUND):
5437 CASE_FLT_FN (BUILT_IN_LROUND):
5438 CASE_FLT_FN (BUILT_IN_LLROUND):
5439 target = expand_builtin_int_roundingfn_2 (exp, target);
5440 if (target)
5441 return target;
5442 break;
5443
5444 CASE_FLT_FN (BUILT_IN_POWI):
5445 target = expand_builtin_powi (exp, target);
5446 if (target)
5447 return target;
5448 break;
5449
5450 CASE_FLT_FN (BUILT_IN_ATAN2):
5451 CASE_FLT_FN (BUILT_IN_LDEXP):
5452 CASE_FLT_FN (BUILT_IN_SCALB):
5453 CASE_FLT_FN (BUILT_IN_SCALBN):
5454 CASE_FLT_FN (BUILT_IN_SCALBLN):
5455 if (! flag_unsafe_math_optimizations)
5456 break;
5457
5458 CASE_FLT_FN (BUILT_IN_FMOD):
5459 CASE_FLT_FN (BUILT_IN_REMAINDER):
5460 CASE_FLT_FN (BUILT_IN_DREM):
5461 CASE_FLT_FN (BUILT_IN_POW):
5462 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5463 if (target)
5464 return target;
5465 break;
5466
5467 CASE_FLT_FN (BUILT_IN_CEXPI):
5468 target = expand_builtin_cexpi (exp, target);
5469 gcc_assert (target);
5470 return target;
5471
5472 CASE_FLT_FN (BUILT_IN_SIN):
5473 CASE_FLT_FN (BUILT_IN_COS):
5474 if (! flag_unsafe_math_optimizations)
5475 break;
5476 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5477 if (target)
5478 return target;
5479 break;
5480
5481 CASE_FLT_FN (BUILT_IN_SINCOS):
5482 if (! flag_unsafe_math_optimizations)
5483 break;
5484 target = expand_builtin_sincos (exp);
5485 if (target)
5486 return target;
5487 break;
5488
5489 case BUILT_IN_APPLY_ARGS:
5490 return expand_builtin_apply_args ();
5491
5492 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5493 FUNCTION with a copy of the parameters described by
5494 ARGUMENTS, and ARGSIZE. It returns a block of memory
5495 allocated on the stack into which is stored all the registers
5496 that might possibly be used for returning the result of a
5497 function. ARGUMENTS is the value returned by
5498 __builtin_apply_args. ARGSIZE is the number of bytes of
5499 arguments that must be copied. ??? How should this value be
5500 computed? We'll also need a safe worst case value for varargs
5501 functions. */
5502 case BUILT_IN_APPLY:
5503 if (!validate_arglist (exp, POINTER_TYPE,
5504 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5505 && !validate_arglist (exp, REFERENCE_TYPE,
5506 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5507 return const0_rtx;
5508 else
5509 {
5510 rtx ops[3];
5511
5512 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5513 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5514 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5515
5516 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5517 }
5518
5519 /* __builtin_return (RESULT) causes the function to return the
5520 value described by RESULT. RESULT is address of the block of
5521 memory returned by __builtin_apply. */
5522 case BUILT_IN_RETURN:
5523 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5524 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5525 return const0_rtx;
5526
5527 case BUILT_IN_SAVEREGS:
5528 return expand_builtin_saveregs ();
5529
5530 case BUILT_IN_VA_ARG_PACK:
5531 /* All valid uses of __builtin_va_arg_pack () are removed during
5532 inlining. */
5533 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5534 return const0_rtx;
5535
5536 case BUILT_IN_VA_ARG_PACK_LEN:
5537 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5538 inlining. */
5539 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5540 return const0_rtx;
5541
5542 /* Return the address of the first anonymous stack arg. */
5543 case BUILT_IN_NEXT_ARG:
5544 if (fold_builtin_next_arg (exp, false))
5545 return const0_rtx;
5546 return expand_builtin_next_arg ();
5547
5548 case BUILT_IN_CLEAR_CACHE:
5549 target = expand_builtin___clear_cache (exp);
5550 if (target)
5551 return target;
5552 break;
5553
5554 case BUILT_IN_CLASSIFY_TYPE:
5555 return expand_builtin_classify_type (exp);
5556
5557 case BUILT_IN_CONSTANT_P:
5558 return const0_rtx;
5559
5560 case BUILT_IN_FRAME_ADDRESS:
5561 case BUILT_IN_RETURN_ADDRESS:
5562 return expand_builtin_frame_address (fndecl, exp);
5563
5564 /* Returns the address of the area where the structure is returned.
5565 0 otherwise. */
5566 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5567 if (call_expr_nargs (exp) != 0
5568 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5569 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5570 return const0_rtx;
5571 else
5572 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5573
5574 case BUILT_IN_ALLOCA:
5575 case BUILT_IN_ALLOCA_WITH_ALIGN:
5576 /* If the allocation stems from the declaration of a variable-sized
5577 object, it cannot accumulate. */
5578 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5579 if (target)
5580 return target;
5581 break;
5582
5583 case BUILT_IN_STACK_SAVE:
5584 return expand_stack_save ();
5585
5586 case BUILT_IN_STACK_RESTORE:
5587 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5588 return const0_rtx;
5589
5590 case BUILT_IN_BSWAP32:
5591 case BUILT_IN_BSWAP64:
5592 target = expand_builtin_bswap (exp, target, subtarget);
5593
5594 if (target)
5595 return target;
5596 break;
5597
5598 CASE_INT_FN (BUILT_IN_FFS):
5599 case BUILT_IN_FFSIMAX:
5600 target = expand_builtin_unop (target_mode, exp, target,
5601 subtarget, ffs_optab);
5602 if (target)
5603 return target;
5604 break;
5605
5606 CASE_INT_FN (BUILT_IN_CLZ):
5607 case BUILT_IN_CLZIMAX:
5608 target = expand_builtin_unop (target_mode, exp, target,
5609 subtarget, clz_optab);
5610 if (target)
5611 return target;
5612 break;
5613
5614 CASE_INT_FN (BUILT_IN_CTZ):
5615 case BUILT_IN_CTZIMAX:
5616 target = expand_builtin_unop (target_mode, exp, target,
5617 subtarget, ctz_optab);
5618 if (target)
5619 return target;
5620 break;
5621
5622 CASE_INT_FN (BUILT_IN_CLRSB):
5623 case BUILT_IN_CLRSBIMAX:
5624 target = expand_builtin_unop (target_mode, exp, target,
5625 subtarget, clrsb_optab);
5626 if (target)
5627 return target;
5628 break;
5629
5630 CASE_INT_FN (BUILT_IN_POPCOUNT):
5631 case BUILT_IN_POPCOUNTIMAX:
5632 target = expand_builtin_unop (target_mode, exp, target,
5633 subtarget, popcount_optab);
5634 if (target)
5635 return target;
5636 break;
5637
5638 CASE_INT_FN (BUILT_IN_PARITY):
5639 case BUILT_IN_PARITYIMAX:
5640 target = expand_builtin_unop (target_mode, exp, target,
5641 subtarget, parity_optab);
5642 if (target)
5643 return target;
5644 break;
5645
5646 case BUILT_IN_STRLEN:
5647 target = expand_builtin_strlen (exp, target, target_mode);
5648 if (target)
5649 return target;
5650 break;
5651
5652 case BUILT_IN_STRCPY:
5653 target = expand_builtin_strcpy (exp, target);
5654 if (target)
5655 return target;
5656 break;
5657
5658 case BUILT_IN_STRNCPY:
5659 target = expand_builtin_strncpy (exp, target);
5660 if (target)
5661 return target;
5662 break;
5663
5664 case BUILT_IN_STPCPY:
5665 target = expand_builtin_stpcpy (exp, target, mode);
5666 if (target)
5667 return target;
5668 break;
5669
5670 case BUILT_IN_MEMCPY:
5671 target = expand_builtin_memcpy (exp, target);
5672 if (target)
5673 return target;
5674 break;
5675
5676 case BUILT_IN_MEMPCPY:
5677 target = expand_builtin_mempcpy (exp, target, mode);
5678 if (target)
5679 return target;
5680 break;
5681
5682 case BUILT_IN_MEMSET:
5683 target = expand_builtin_memset (exp, target, mode);
5684 if (target)
5685 return target;
5686 break;
5687
5688 case BUILT_IN_BZERO:
5689 target = expand_builtin_bzero (exp);
5690 if (target)
5691 return target;
5692 break;
5693
5694 case BUILT_IN_STRCMP:
5695 target = expand_builtin_strcmp (exp, target);
5696 if (target)
5697 return target;
5698 break;
5699
5700 case BUILT_IN_STRNCMP:
5701 target = expand_builtin_strncmp (exp, target, mode);
5702 if (target)
5703 return target;
5704 break;
5705
5706 case BUILT_IN_BCMP:
5707 case BUILT_IN_MEMCMP:
5708 target = expand_builtin_memcmp (exp, target, mode);
5709 if (target)
5710 return target;
5711 break;
5712
5713 case BUILT_IN_SETJMP:
5714 /* This should have been lowered to the builtins below. */
5715 gcc_unreachable ();
5716
5717 case BUILT_IN_SETJMP_SETUP:
5718 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5719 and the receiver label. */
5720 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5721 {
5722 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5723 VOIDmode, EXPAND_NORMAL);
5724 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5725 rtx label_r = label_rtx (label);
5726
5727 /* This is copied from the handling of non-local gotos. */
5728 expand_builtin_setjmp_setup (buf_addr, label_r);
5729 nonlocal_goto_handler_labels
5730 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5731 nonlocal_goto_handler_labels);
5732 /* ??? Do not let expand_label treat us as such since we would
5733 not want to be both on the list of non-local labels and on
5734 the list of forced labels. */
5735 FORCED_LABEL (label) = 0;
5736 return const0_rtx;
5737 }
5738 break;
5739
5740 case BUILT_IN_SETJMP_DISPATCHER:
5741 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5742 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 {
5744 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5745 rtx label_r = label_rtx (label);
5746
5747 /* Remove the dispatcher label from the list of non-local labels
5748 since the receiver labels have been added to it above. */
5749 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5750 return const0_rtx;
5751 }
5752 break;
5753
5754 case BUILT_IN_SETJMP_RECEIVER:
5755 /* __builtin_setjmp_receiver is passed the receiver label. */
5756 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5757 {
5758 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5759 rtx label_r = label_rtx (label);
5760
5761 expand_builtin_setjmp_receiver (label_r);
5762 return const0_rtx;
5763 }
5764 break;
5765
5766 /* __builtin_longjmp is passed a pointer to an array of five words.
5767 It's similar to the C library longjmp function but works with
5768 __builtin_setjmp above. */
5769 case BUILT_IN_LONGJMP:
5770 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5771 {
5772 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5773 VOIDmode, EXPAND_NORMAL);
5774 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5775
5776 if (value != const1_rtx)
5777 {
5778 error ("%<__builtin_longjmp%> second argument must be 1");
5779 return const0_rtx;
5780 }
5781
5782 expand_builtin_longjmp (buf_addr, value);
5783 return const0_rtx;
5784 }
5785 break;
5786
5787 case BUILT_IN_NONLOCAL_GOTO:
5788 target = expand_builtin_nonlocal_goto (exp);
5789 if (target)
5790 return target;
5791 break;
5792
5793 /* This updates the setjmp buffer that is its argument with the value
5794 of the current stack pointer. */
5795 case BUILT_IN_UPDATE_SETJMP_BUF:
5796 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5797 {
5798 rtx buf_addr
5799 = expand_normal (CALL_EXPR_ARG (exp, 0));
5800
5801 expand_builtin_update_setjmp_buf (buf_addr);
5802 return const0_rtx;
5803 }
5804 break;
5805
5806 case BUILT_IN_TRAP:
5807 expand_builtin_trap ();
5808 return const0_rtx;
5809
5810 case BUILT_IN_UNREACHABLE:
5811 expand_builtin_unreachable ();
5812 return const0_rtx;
5813
5814 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5815 case BUILT_IN_SIGNBITD32:
5816 case BUILT_IN_SIGNBITD64:
5817 case BUILT_IN_SIGNBITD128:
5818 target = expand_builtin_signbit (exp, target);
5819 if (target)
5820 return target;
5821 break;
5822
5823 /* Various hooks for the DWARF 2 __throw routine. */
5824 case BUILT_IN_UNWIND_INIT:
5825 expand_builtin_unwind_init ();
5826 return const0_rtx;
5827 case BUILT_IN_DWARF_CFA:
5828 return virtual_cfa_rtx;
5829 #ifdef DWARF2_UNWIND_INFO
5830 case BUILT_IN_DWARF_SP_COLUMN:
5831 return expand_builtin_dwarf_sp_column ();
5832 case BUILT_IN_INIT_DWARF_REG_SIZES:
5833 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5834 return const0_rtx;
5835 #endif
5836 case BUILT_IN_FROB_RETURN_ADDR:
5837 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5838 case BUILT_IN_EXTRACT_RETURN_ADDR:
5839 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5840 case BUILT_IN_EH_RETURN:
5841 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5842 CALL_EXPR_ARG (exp, 1));
5843 return const0_rtx;
5844 #ifdef EH_RETURN_DATA_REGNO
5845 case BUILT_IN_EH_RETURN_DATA_REGNO:
5846 return expand_builtin_eh_return_data_regno (exp);
5847 #endif
5848 case BUILT_IN_EXTEND_POINTER:
5849 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5850 case BUILT_IN_EH_POINTER:
5851 return expand_builtin_eh_pointer (exp);
5852 case BUILT_IN_EH_FILTER:
5853 return expand_builtin_eh_filter (exp);
5854 case BUILT_IN_EH_COPY_VALUES:
5855 return expand_builtin_eh_copy_values (exp);
5856
5857 case BUILT_IN_VA_START:
5858 return expand_builtin_va_start (exp);
5859 case BUILT_IN_VA_END:
5860 return expand_builtin_va_end (exp);
5861 case BUILT_IN_VA_COPY:
5862 return expand_builtin_va_copy (exp);
5863 case BUILT_IN_EXPECT:
5864 return expand_builtin_expect (exp, target);
5865 case BUILT_IN_ASSUME_ALIGNED:
5866 return expand_builtin_assume_aligned (exp, target);
5867 case BUILT_IN_PREFETCH:
5868 expand_builtin_prefetch (exp);
5869 return const0_rtx;
5870
5871 case BUILT_IN_INIT_TRAMPOLINE:
5872 return expand_builtin_init_trampoline (exp);
5873 case BUILT_IN_ADJUST_TRAMPOLINE:
5874 return expand_builtin_adjust_trampoline (exp);
5875
5876 case BUILT_IN_FORK:
5877 case BUILT_IN_EXECL:
5878 case BUILT_IN_EXECV:
5879 case BUILT_IN_EXECLP:
5880 case BUILT_IN_EXECLE:
5881 case BUILT_IN_EXECVP:
5882 case BUILT_IN_EXECVE:
5883 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5884 if (target)
5885 return target;
5886 break;
5887
5888 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5889 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5890 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5891 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5892 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5893 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5894 target = expand_builtin_sync_operation (mode, exp, PLUS,
5895 false, target, ignore);
5896 if (target)
5897 return target;
5898 break;
5899
5900 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5901 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5902 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5903 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5904 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5905 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5906 target = expand_builtin_sync_operation (mode, exp, MINUS,
5907 false, target, ignore);
5908 if (target)
5909 return target;
5910 break;
5911
5912 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5913 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5914 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5915 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5916 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5917 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5918 target = expand_builtin_sync_operation (mode, exp, IOR,
5919 false, target, ignore);
5920 if (target)
5921 return target;
5922 break;
5923
5924 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5925 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5926 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5927 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5928 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5929 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5930 target = expand_builtin_sync_operation (mode, exp, AND,
5931 false, target, ignore);
5932 if (target)
5933 return target;
5934 break;
5935
5936 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5937 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5938 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5939 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5940 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5941 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5942 target = expand_builtin_sync_operation (mode, exp, XOR,
5943 false, target, ignore);
5944 if (target)
5945 return target;
5946 break;
5947
5948 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5949 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5950 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5951 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5952 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5953 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5954 target = expand_builtin_sync_operation (mode, exp, NOT,
5955 false, target, ignore);
5956 if (target)
5957 return target;
5958 break;
5959
5960 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5961 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5962 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5963 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5964 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5965 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5966 target = expand_builtin_sync_operation (mode, exp, PLUS,
5967 true, target, ignore);
5968 if (target)
5969 return target;
5970 break;
5971
5972 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5973 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5974 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5975 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5976 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5977 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5978 target = expand_builtin_sync_operation (mode, exp, MINUS,
5979 true, target, ignore);
5980 if (target)
5981 return target;
5982 break;
5983
5984 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5985 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5986 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5987 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5988 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5989 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5990 target = expand_builtin_sync_operation (mode, exp, IOR,
5991 true, target, ignore);
5992 if (target)
5993 return target;
5994 break;
5995
5996 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5997 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5998 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5999 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6000 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6002 target = expand_builtin_sync_operation (mode, exp, AND,
6003 true, target, ignore);
6004 if (target)
6005 return target;
6006 break;
6007
6008 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6009 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6010 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6011 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6012 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6013 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6014 target = expand_builtin_sync_operation (mode, exp, XOR,
6015 true, target, ignore);
6016 if (target)
6017 return target;
6018 break;
6019
6020 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6021 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6022 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6023 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6024 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6025 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6026 target = expand_builtin_sync_operation (mode, exp, NOT,
6027 true, target, ignore);
6028 if (target)
6029 return target;
6030 break;
6031
6032 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6033 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6034 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6035 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6036 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6037 if (mode == VOIDmode)
6038 mode = TYPE_MODE (boolean_type_node);
6039 if (!target || !register_operand (target, mode))
6040 target = gen_reg_rtx (mode);
6041
6042 mode = get_builtin_sync_mode
6043 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6044 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6045 if (target)
6046 return target;
6047 break;
6048
6049 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6050 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6051 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6052 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6053 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6054 mode = get_builtin_sync_mode
6055 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6056 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6057 if (target)
6058 return target;
6059 break;
6060
6061 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6062 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6063 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6064 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6065 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6066 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6067 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6068 if (target)
6069 return target;
6070 break;
6071
6072 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6073 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6074 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6075 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6076 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6077 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6078 expand_builtin_sync_lock_release (mode, exp);
6079 return const0_rtx;
6080
6081 case BUILT_IN_SYNC_SYNCHRONIZE:
6082 expand_builtin_sync_synchronize ();
6083 return const0_rtx;
6084
6085 case BUILT_IN_OBJECT_SIZE:
6086 return expand_builtin_object_size (exp);
6087
6088 case BUILT_IN_MEMCPY_CHK:
6089 case BUILT_IN_MEMPCPY_CHK:
6090 case BUILT_IN_MEMMOVE_CHK:
6091 case BUILT_IN_MEMSET_CHK:
6092 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6093 if (target)
6094 return target;
6095 break;
6096
6097 case BUILT_IN_STRCPY_CHK:
6098 case BUILT_IN_STPCPY_CHK:
6099 case BUILT_IN_STRNCPY_CHK:
6100 case BUILT_IN_STRCAT_CHK:
6101 case BUILT_IN_STRNCAT_CHK:
6102 case BUILT_IN_SNPRINTF_CHK:
6103 case BUILT_IN_VSNPRINTF_CHK:
6104 maybe_emit_chk_warning (exp, fcode);
6105 break;
6106
6107 case BUILT_IN_SPRINTF_CHK:
6108 case BUILT_IN_VSPRINTF_CHK:
6109 maybe_emit_sprintf_chk_warning (exp, fcode);
6110 break;
6111
6112 case BUILT_IN_FREE:
6113 if (warn_free_nonheap_object)
6114 maybe_emit_free_warning (exp);
6115 break;
6116
6117 default: /* just do library call, if unknown builtin */
6118 break;
6119 }
6120
6121 /* The switch statement above can drop through to cause the function
6122 to be called normally. */
6123 return expand_call (exp, target, ignore);
6124 }
6125
6126 /* Determine whether a tree node represents a call to a built-in
6127 function. If the tree T is a call to a built-in function with
6128 the right number of arguments of the appropriate types, return
6129 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6130 Otherwise the return value is END_BUILTINS. */
6131
6132 enum built_in_function
6133 builtin_mathfn_code (const_tree t)
6134 {
6135 const_tree fndecl, arg, parmlist;
6136 const_tree argtype, parmtype;
6137 const_call_expr_arg_iterator iter;
6138
6139 if (TREE_CODE (t) != CALL_EXPR
6140 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6141 return END_BUILTINS;
6142
6143 fndecl = get_callee_fndecl (t);
6144 if (fndecl == NULL_TREE
6145 || TREE_CODE (fndecl) != FUNCTION_DECL
6146 || ! DECL_BUILT_IN (fndecl)
6147 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6148 return END_BUILTINS;
6149
6150 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6151 init_const_call_expr_arg_iterator (t, &iter);
6152 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6153 {
6154 /* If a function doesn't take a variable number of arguments,
6155 the last element in the list will have type `void'. */
6156 parmtype = TREE_VALUE (parmlist);
6157 if (VOID_TYPE_P (parmtype))
6158 {
6159 if (more_const_call_expr_args_p (&iter))
6160 return END_BUILTINS;
6161 return DECL_FUNCTION_CODE (fndecl);
6162 }
6163
6164 if (! more_const_call_expr_args_p (&iter))
6165 return END_BUILTINS;
6166
6167 arg = next_const_call_expr_arg (&iter);
6168 argtype = TREE_TYPE (arg);
6169
6170 if (SCALAR_FLOAT_TYPE_P (parmtype))
6171 {
6172 if (! SCALAR_FLOAT_TYPE_P (argtype))
6173 return END_BUILTINS;
6174 }
6175 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6176 {
6177 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6178 return END_BUILTINS;
6179 }
6180 else if (POINTER_TYPE_P (parmtype))
6181 {
6182 if (! POINTER_TYPE_P (argtype))
6183 return END_BUILTINS;
6184 }
6185 else if (INTEGRAL_TYPE_P (parmtype))
6186 {
6187 if (! INTEGRAL_TYPE_P (argtype))
6188 return END_BUILTINS;
6189 }
6190 else
6191 return END_BUILTINS;
6192 }
6193
6194 /* Variable-length argument list. */
6195 return DECL_FUNCTION_CODE (fndecl);
6196 }
6197
6198 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6199 evaluate to a constant. */
6200
6201 static tree
6202 fold_builtin_constant_p (tree arg)
6203 {
6204 /* We return 1 for a numeric type that's known to be a constant
6205 value at compile-time or for an aggregate type that's a
6206 literal constant. */
6207 STRIP_NOPS (arg);
6208
6209 /* If we know this is a constant, emit the constant of one. */
6210 if (CONSTANT_CLASS_P (arg)
6211 || (TREE_CODE (arg) == CONSTRUCTOR
6212 && TREE_CONSTANT (arg)))
6213 return integer_one_node;
6214 if (TREE_CODE (arg) == ADDR_EXPR)
6215 {
6216 tree op = TREE_OPERAND (arg, 0);
6217 if (TREE_CODE (op) == STRING_CST
6218 || (TREE_CODE (op) == ARRAY_REF
6219 && integer_zerop (TREE_OPERAND (op, 1))
6220 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6221 return integer_one_node;
6222 }
6223
6224 /* If this expression has side effects, show we don't know it to be a
6225 constant. Likewise if it's a pointer or aggregate type since in
6226 those case we only want literals, since those are only optimized
6227 when generating RTL, not later.
6228 And finally, if we are compiling an initializer, not code, we
6229 need to return a definite result now; there's not going to be any
6230 more optimization done. */
6231 if (TREE_SIDE_EFFECTS (arg)
6232 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6233 || POINTER_TYPE_P (TREE_TYPE (arg))
6234 || cfun == 0
6235 || folding_initializer)
6236 return integer_zero_node;
6237
6238 return NULL_TREE;
6239 }
6240
6241 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6242 return it as a truthvalue. */
6243
6244 static tree
6245 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6246 {
6247 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6248
6249 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6250 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6251 ret_type = TREE_TYPE (TREE_TYPE (fn));
6252 pred_type = TREE_VALUE (arg_types);
6253 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6254
6255 pred = fold_convert_loc (loc, pred_type, pred);
6256 expected = fold_convert_loc (loc, expected_type, expected);
6257 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6258
6259 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6260 build_int_cst (ret_type, 0));
6261 }
6262
6263 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6264 NULL_TREE if no simplification is possible. */
6265
6266 static tree
6267 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6268 {
6269 tree inner, fndecl, inner_arg0;
6270 enum tree_code code;
6271
6272 /* Distribute the expected value over short-circuiting operators.
6273 See through the cast from truthvalue_type_node to long. */
6274 inner_arg0 = arg0;
6275 while (TREE_CODE (inner_arg0) == NOP_EXPR
6276 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6277 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6278 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6279
6280 /* If this is a builtin_expect within a builtin_expect keep the
6281 inner one. See through a comparison against a constant. It
6282 might have been added to create a thruthvalue. */
6283 inner = inner_arg0;
6284
6285 if (COMPARISON_CLASS_P (inner)
6286 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6287 inner = TREE_OPERAND (inner, 0);
6288
6289 if (TREE_CODE (inner) == CALL_EXPR
6290 && (fndecl = get_callee_fndecl (inner))
6291 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6292 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6293 return arg0;
6294
6295 inner = inner_arg0;
6296 code = TREE_CODE (inner);
6297 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6298 {
6299 tree op0 = TREE_OPERAND (inner, 0);
6300 tree op1 = TREE_OPERAND (inner, 1);
6301
6302 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6303 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6304 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6305
6306 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6307 }
6308
6309 /* If the argument isn't invariant then there's nothing else we can do. */
6310 if (!TREE_CONSTANT (inner_arg0))
6311 return NULL_TREE;
6312
6313 /* If we expect that a comparison against the argument will fold to
6314 a constant return the constant. In practice, this means a true
6315 constant or the address of a non-weak symbol. */
6316 inner = inner_arg0;
6317 STRIP_NOPS (inner);
6318 if (TREE_CODE (inner) == ADDR_EXPR)
6319 {
6320 do
6321 {
6322 inner = TREE_OPERAND (inner, 0);
6323 }
6324 while (TREE_CODE (inner) == COMPONENT_REF
6325 || TREE_CODE (inner) == ARRAY_REF);
6326 if ((TREE_CODE (inner) == VAR_DECL
6327 || TREE_CODE (inner) == FUNCTION_DECL)
6328 && DECL_WEAK (inner))
6329 return NULL_TREE;
6330 }
6331
6332 /* Otherwise, ARG0 already has the proper type for the return value. */
6333 return arg0;
6334 }
6335
6336 /* Fold a call to __builtin_classify_type with argument ARG. */
6337
6338 static tree
6339 fold_builtin_classify_type (tree arg)
6340 {
6341 if (arg == 0)
6342 return build_int_cst (integer_type_node, no_type_class);
6343
6344 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6345 }
6346
6347 /* Fold a call to __builtin_strlen with argument ARG. */
6348
6349 static tree
6350 fold_builtin_strlen (location_t loc, tree type, tree arg)
6351 {
6352 if (!validate_arg (arg, POINTER_TYPE))
6353 return NULL_TREE;
6354 else
6355 {
6356 tree len = c_strlen (arg, 0);
6357
6358 if (len)
6359 return fold_convert_loc (loc, type, len);
6360
6361 return NULL_TREE;
6362 }
6363 }
6364
6365 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6366
6367 static tree
6368 fold_builtin_inf (location_t loc, tree type, int warn)
6369 {
6370 REAL_VALUE_TYPE real;
6371
6372 /* __builtin_inff is intended to be usable to define INFINITY on all
6373 targets. If an infinity is not available, INFINITY expands "to a
6374 positive constant of type float that overflows at translation
6375 time", footnote "In this case, using INFINITY will violate the
6376 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6377 Thus we pedwarn to ensure this constraint violation is
6378 diagnosed. */
6379 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6380 pedwarn (loc, 0, "target format does not support infinity");
6381
6382 real_inf (&real);
6383 return build_real (type, real);
6384 }
6385
6386 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6387
6388 static tree
6389 fold_builtin_nan (tree arg, tree type, int quiet)
6390 {
6391 REAL_VALUE_TYPE real;
6392 const char *str;
6393
6394 if (!validate_arg (arg, POINTER_TYPE))
6395 return NULL_TREE;
6396 str = c_getstr (arg);
6397 if (!str)
6398 return NULL_TREE;
6399
6400 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6401 return NULL_TREE;
6402
6403 return build_real (type, real);
6404 }
6405
6406 /* Return true if the floating point expression T has an integer value.
6407 We also allow +Inf, -Inf and NaN to be considered integer values. */
6408
6409 static bool
6410 integer_valued_real_p (tree t)
6411 {
6412 switch (TREE_CODE (t))
6413 {
6414 case FLOAT_EXPR:
6415 return true;
6416
6417 case ABS_EXPR:
6418 case SAVE_EXPR:
6419 return integer_valued_real_p (TREE_OPERAND (t, 0));
6420
6421 case COMPOUND_EXPR:
6422 case MODIFY_EXPR:
6423 case BIND_EXPR:
6424 return integer_valued_real_p (TREE_OPERAND (t, 1));
6425
6426 case PLUS_EXPR:
6427 case MINUS_EXPR:
6428 case MULT_EXPR:
6429 case MIN_EXPR:
6430 case MAX_EXPR:
6431 return integer_valued_real_p (TREE_OPERAND (t, 0))
6432 && integer_valued_real_p (TREE_OPERAND (t, 1));
6433
6434 case COND_EXPR:
6435 return integer_valued_real_p (TREE_OPERAND (t, 1))
6436 && integer_valued_real_p (TREE_OPERAND (t, 2));
6437
6438 case REAL_CST:
6439 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6440
6441 case NOP_EXPR:
6442 {
6443 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6444 if (TREE_CODE (type) == INTEGER_TYPE)
6445 return true;
6446 if (TREE_CODE (type) == REAL_TYPE)
6447 return integer_valued_real_p (TREE_OPERAND (t, 0));
6448 break;
6449 }
6450
6451 case CALL_EXPR:
6452 switch (builtin_mathfn_code (t))
6453 {
6454 CASE_FLT_FN (BUILT_IN_CEIL):
6455 CASE_FLT_FN (BUILT_IN_FLOOR):
6456 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6457 CASE_FLT_FN (BUILT_IN_RINT):
6458 CASE_FLT_FN (BUILT_IN_ROUND):
6459 CASE_FLT_FN (BUILT_IN_TRUNC):
6460 return true;
6461
6462 CASE_FLT_FN (BUILT_IN_FMIN):
6463 CASE_FLT_FN (BUILT_IN_FMAX):
6464 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6465 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6466
6467 default:
6468 break;
6469 }
6470 break;
6471
6472 default:
6473 break;
6474 }
6475 return false;
6476 }
6477
6478 /* FNDECL is assumed to be a builtin where truncation can be propagated
6479 across (for instance floor((double)f) == (double)floorf (f).
6480 Do the transformation for a call with argument ARG. */
6481
6482 static tree
6483 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6484 {
6485 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6486
6487 if (!validate_arg (arg, REAL_TYPE))
6488 return NULL_TREE;
6489
6490 /* Integer rounding functions are idempotent. */
6491 if (fcode == builtin_mathfn_code (arg))
6492 return arg;
6493
6494 /* If argument is already integer valued, and we don't need to worry
6495 about setting errno, there's no need to perform rounding. */
6496 if (! flag_errno_math && integer_valued_real_p (arg))
6497 return arg;
6498
6499 if (optimize)
6500 {
6501 tree arg0 = strip_float_extensions (arg);
6502 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6503 tree newtype = TREE_TYPE (arg0);
6504 tree decl;
6505
6506 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6507 && (decl = mathfn_built_in (newtype, fcode)))
6508 return fold_convert_loc (loc, ftype,
6509 build_call_expr_loc (loc, decl, 1,
6510 fold_convert_loc (loc,
6511 newtype,
6512 arg0)));
6513 }
6514 return NULL_TREE;
6515 }
6516
6517 /* FNDECL is assumed to be builtin which can narrow the FP type of
6518 the argument, for instance lround((double)f) -> lroundf (f).
6519 Do the transformation for a call with argument ARG. */
6520
6521 static tree
6522 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6523 {
6524 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6525
6526 if (!validate_arg (arg, REAL_TYPE))
6527 return NULL_TREE;
6528
6529 /* If argument is already integer valued, and we don't need to worry
6530 about setting errno, there's no need to perform rounding. */
6531 if (! flag_errno_math && integer_valued_real_p (arg))
6532 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6533 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6534
6535 if (optimize)
6536 {
6537 tree ftype = TREE_TYPE (arg);
6538 tree arg0 = strip_float_extensions (arg);
6539 tree newtype = TREE_TYPE (arg0);
6540 tree decl;
6541
6542 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6543 && (decl = mathfn_built_in (newtype, fcode)))
6544 return build_call_expr_loc (loc, decl, 1,
6545 fold_convert_loc (loc, newtype, arg0));
6546 }
6547
6548 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
6549 sizeof (int) == sizeof (long). */
6550 if (TYPE_PRECISION (integer_type_node)
6551 == TYPE_PRECISION (long_integer_type_node))
6552 {
6553 tree newfn = NULL_TREE;
6554 switch (fcode)
6555 {
6556 CASE_FLT_FN (BUILT_IN_ICEIL):
6557 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6558 break;
6559
6560 CASE_FLT_FN (BUILT_IN_IFLOOR):
6561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6562 break;
6563
6564 CASE_FLT_FN (BUILT_IN_IROUND):
6565 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6566 break;
6567
6568 CASE_FLT_FN (BUILT_IN_IRINT):
6569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6570 break;
6571
6572 default:
6573 break;
6574 }
6575
6576 if (newfn)
6577 {
6578 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6579 return fold_convert_loc (loc,
6580 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6581 }
6582 }
6583
6584 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6585 sizeof (long long) == sizeof (long). */
6586 if (TYPE_PRECISION (long_long_integer_type_node)
6587 == TYPE_PRECISION (long_integer_type_node))
6588 {
6589 tree newfn = NULL_TREE;
6590 switch (fcode)
6591 {
6592 CASE_FLT_FN (BUILT_IN_LLCEIL):
6593 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6594 break;
6595
6596 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6597 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6598 break;
6599
6600 CASE_FLT_FN (BUILT_IN_LLROUND):
6601 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6602 break;
6603
6604 CASE_FLT_FN (BUILT_IN_LLRINT):
6605 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6606 break;
6607
6608 default:
6609 break;
6610 }
6611
6612 if (newfn)
6613 {
6614 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6615 return fold_convert_loc (loc,
6616 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6617 }
6618 }
6619
6620 return NULL_TREE;
6621 }
6622
6623 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6624 return type. Return NULL_TREE if no simplification can be made. */
6625
6626 static tree
6627 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6628 {
6629 tree res;
6630
6631 if (!validate_arg (arg, COMPLEX_TYPE)
6632 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6633 return NULL_TREE;
6634
6635 /* Calculate the result when the argument is a constant. */
6636 if (TREE_CODE (arg) == COMPLEX_CST
6637 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6638 type, mpfr_hypot)))
6639 return res;
6640
6641 if (TREE_CODE (arg) == COMPLEX_EXPR)
6642 {
6643 tree real = TREE_OPERAND (arg, 0);
6644 tree imag = TREE_OPERAND (arg, 1);
6645
6646 /* If either part is zero, cabs is fabs of the other. */
6647 if (real_zerop (real))
6648 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6649 if (real_zerop (imag))
6650 return fold_build1_loc (loc, ABS_EXPR, type, real);
6651
6652 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6653 if (flag_unsafe_math_optimizations
6654 && operand_equal_p (real, imag, OEP_PURE_SAME))
6655 {
6656 const REAL_VALUE_TYPE sqrt2_trunc
6657 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6658 STRIP_NOPS (real);
6659 return fold_build2_loc (loc, MULT_EXPR, type,
6660 fold_build1_loc (loc, ABS_EXPR, type, real),
6661 build_real (type, sqrt2_trunc));
6662 }
6663 }
6664
6665 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6666 if (TREE_CODE (arg) == NEGATE_EXPR
6667 || TREE_CODE (arg) == CONJ_EXPR)
6668 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6669
6670 /* Don't do this when optimizing for size. */
6671 if (flag_unsafe_math_optimizations
6672 && optimize && optimize_function_for_speed_p (cfun))
6673 {
6674 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6675
6676 if (sqrtfn != NULL_TREE)
6677 {
6678 tree rpart, ipart, result;
6679
6680 arg = builtin_save_expr (arg);
6681
6682 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6683 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6684
6685 rpart = builtin_save_expr (rpart);
6686 ipart = builtin_save_expr (ipart);
6687
6688 result = fold_build2_loc (loc, PLUS_EXPR, type,
6689 fold_build2_loc (loc, MULT_EXPR, type,
6690 rpart, rpart),
6691 fold_build2_loc (loc, MULT_EXPR, type,
6692 ipart, ipart));
6693
6694 return build_call_expr_loc (loc, sqrtfn, 1, result);
6695 }
6696 }
6697
6698 return NULL_TREE;
6699 }
6700
6701 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6702 complex tree type of the result. If NEG is true, the imaginary
6703 zero is negative. */
6704
6705 static tree
6706 build_complex_cproj (tree type, bool neg)
6707 {
6708 REAL_VALUE_TYPE rinf, rzero = dconst0;
6709
6710 real_inf (&rinf);
6711 rzero.sign = neg;
6712 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6713 build_real (TREE_TYPE (type), rzero));
6714 }
6715
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  /* cproj only applies to complex values with a real component type.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj of an infinite value is (inf + copysign(0,imag)*i);
	 anything finite projects to itself.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* Look through no-op conversions to expose REAL_CST operands.  */
      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
6771
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The
     &dconst0/true arguments make do_mpfr_arg1 require ARG >= 0.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: decrementing the binary
	     exponent halves the value (1/N -> 1/(2*N)).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is already known nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
6845
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All remaining transforms may change rounding/exception behavior,
     so they are gated on -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Decrementing the binary exponent halves 1/3 to 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
6936
6937 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6938 TYPE is the type of the return value. Return NULL_TREE if no
6939 simplification can be made. */
6940
6941 static tree
6942 fold_builtin_cos (location_t loc,
6943 tree arg, tree type, tree fndecl)
6944 {
6945 tree res, narg;
6946
6947 if (!validate_arg (arg, REAL_TYPE))
6948 return NULL_TREE;
6949
6950 /* Calculate the result when the argument is a constant. */
6951 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6952 return res;
6953
6954 /* Optimize cos(-x) into cos (x). */
6955 if ((narg = fold_strip_sign_ops (arg)))
6956 return build_call_expr_loc (loc, fndecl, 1, narg);
6957
6958 return NULL_TREE;
6959 }
6960
6961 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6962 Return NULL_TREE if no simplification can be made. */
6963
6964 static tree
6965 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6966 {
6967 if (validate_arg (arg, REAL_TYPE))
6968 {
6969 tree res, narg;
6970
6971 /* Calculate the result when the argument is a constant. */
6972 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6973 return res;
6974
6975 /* Optimize cosh(-x) into cosh (x). */
6976 if ((narg = fold_strip_sign_ops (arg)))
6977 return build_call_expr_loc (loc, fndecl, 1, narg);
6978 }
6979
6980 return NULL_TREE;
6981 }
6982
6983 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6984 argument ARG. TYPE is the type of the return value. Return
6985 NULL_TREE if no simplification can be made. */
6986
6987 static tree
6988 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6989 bool hyper)
6990 {
6991 if (validate_arg (arg, COMPLEX_TYPE)
6992 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6993 {
6994 tree tmp;
6995
6996 /* Calculate the result when the argument is a constant. */
6997 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6998 return tmp;
6999
7000 /* Optimize fn(-x) into fn(x). */
7001 if ((tmp = fold_strip_sign_ops (arg)))
7002 return build_call_expr_loc (loc, fndecl, 1, tmp);
7003 }
7004
7005 return NULL_TREE;
7006 }
7007
7008 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7009 Return NULL_TREE if no simplification can be made. */
7010
7011 static tree
7012 fold_builtin_tan (tree arg, tree type)
7013 {
7014 enum built_in_function fcode;
7015 tree res;
7016
7017 if (!validate_arg (arg, REAL_TYPE))
7018 return NULL_TREE;
7019
7020 /* Calculate the result when the argument is a constant. */
7021 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7022 return res;
7023
7024 /* Optimize tan(atan(x)) = x. */
7025 fcode = builtin_mathfn_code (arg);
7026 if (flag_unsafe_math_optimizations
7027 && (fcode == BUILT_IN_ATAN
7028 || fcode == BUILT_IN_ATANF
7029 || fcode == BUILT_IN_ATANL))
7030 return CALL_EXPR_ARG (arg, 0);
7031
7032 return NULL_TREE;
7033 }
7034
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the angle;
   ARG1 and ARG2 are the pointers to which sin and cos are stored.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  Requires a C99 runtime since
     cexpi is only available there.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Evaluate cexpi (arg0) once, then pick its parts apart:
     sin = imagpart, cos = realpart.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Emit the two stores as a COMPOUND_EXPR: *arg1 = imag; *arg2 = real.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7074
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar real type underlying the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  cexpi is a GCC-internal C99-runtime builtin.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + i*y) -> cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Wrap both calls in save_exprs so each is evaluated only once
	 even though they feed two product operands below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp(r)*cos(i) + i * exp(r)*sin(i), taking cos/sin
	 as the real/imag parts of cexpi (i).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7142
7143 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7144 Return NULL_TREE if no simplification can be made. */
7145
7146 static tree
7147 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7148 {
7149 if (!validate_arg (arg, REAL_TYPE))
7150 return NULL_TREE;
7151
7152 /* Optimize trunc of constant value. */
7153 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7154 {
7155 REAL_VALUE_TYPE r, x;
7156 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7157
7158 x = TREE_REAL_CST (arg);
7159 real_trunc (&r, TYPE_MODE (type), &x);
7160 return build_real (type, r);
7161 }
7162
7163 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7164 }
7165
7166 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7167 Return NULL_TREE if no simplification can be made. */
7168
7169 static tree
7170 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7171 {
7172 if (!validate_arg (arg, REAL_TYPE))
7173 return NULL_TREE;
7174
7175 /* Optimize floor of constant value. */
7176 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7177 {
7178 REAL_VALUE_TYPE x;
7179
7180 x = TREE_REAL_CST (arg);
7181 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7182 {
7183 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7184 REAL_VALUE_TYPE r;
7185
7186 real_floor (&r, TYPE_MODE (type), &x);
7187 return build_real (type, r);
7188 }
7189 }
7190
7191 /* Fold floor (x) where x is nonnegative to trunc (x). */
7192 if (tree_expr_nonnegative_p (arg))
7193 {
7194 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7195 if (truncfn)
7196 return build_call_expr_loc (loc, truncfn, 1, arg);
7197 }
7198
7199 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7200 }
7201
7202 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7203 Return NULL_TREE if no simplification can be made. */
7204
7205 static tree
7206 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7207 {
7208 if (!validate_arg (arg, REAL_TYPE))
7209 return NULL_TREE;
7210
7211 /* Optimize ceil of constant value. */
7212 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7213 {
7214 REAL_VALUE_TYPE x;
7215
7216 x = TREE_REAL_CST (arg);
7217 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7218 {
7219 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7220 REAL_VALUE_TYPE r;
7221
7222 real_ceil (&r, TYPE_MODE (type), &x);
7223 return build_real (type, r);
7224 }
7225 }
7226
7227 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7228 }
7229
7230 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7231 Return NULL_TREE if no simplification can be made. */
7232
7233 static tree
7234 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7235 {
7236 if (!validate_arg (arg, REAL_TYPE))
7237 return NULL_TREE;
7238
7239 /* Optimize round of constant value. */
7240 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7241 {
7242 REAL_VALUE_TYPE x;
7243
7244 x = TREE_REAL_CST (arg);
7245 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7246 {
7247 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7248 REAL_VALUE_TYPE r;
7249
7250 real_round (&r, TYPE_MODE (type), &x);
7251 return build_real (type, r);
7252 }
7253 }
7254
7255 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7256 }
7257
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  Only finite values can be
     converted to an integer at compile time.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  /* ITYPE is the integer result type, FTYPE the FP argument type.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Round to an FP integer first, in the direction the builtin
	     specifies.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a double_int and fold only if the value fits
	     the integer result type.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7325
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  The constant is held as a pair of
     HOST_WIDE_INTs: LO holds the low bits, HI the high bits (if any).  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((unsigned HOST_WIDE_INT) (-1)
		    << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of least significant set bit, 0 if none.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; value at zero is target-defined.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; value at zero is target-defined.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: redundant leading sign bits.  First complement a
	     negative value so the search below always looks for the
	     highest set bit.  */
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: total set bits, counted by clearing the lowest
	     set bit (x &= x - 1) until zero.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7438
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  The constant is a pair of HOST_WIDE_INTs
     (LO, HI); the swapped result is accumulated into (R_LO, R_HI).  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit offset S to the mirrored offset D,
	       crossing the LO/HI word boundary as needed.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7497
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function; it also
   identifies which log flavor (log, log2, log10) is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  The log flavor is
	 identified by comparing FUNC against the MPFR entry points.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
7590
7591 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7592 NULL_TREE if no simplification can be made. */
7593
7594 static tree
7595 fold_builtin_hypot (location_t loc, tree fndecl,
7596 tree arg0, tree arg1, tree type)
7597 {
7598 tree res, narg0, narg1;
7599
7600 if (!validate_arg (arg0, REAL_TYPE)
7601 || !validate_arg (arg1, REAL_TYPE))
7602 return NULL_TREE;
7603
7604 /* Calculate the result when the argument is a constant. */
7605 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7606 return res;
7607
7608 /* If either argument to hypot has a negate or abs, strip that off.
7609 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7610 narg0 = fold_strip_sign_ops (arg0);
7611 narg1 = fold_strip_sign_ops (arg1);
7612 if (narg0 || narg1)
7613 {
7614 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7615 narg1 ? narg1 : arg1);
7616 }
7617
7618 /* If either argument is zero, hypot is fabs of the other. */
7619 if (real_zerop (arg0))
7620 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7621 else if (real_zerop (arg1))
7622 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7623
7624 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7625 if (flag_unsafe_math_optimizations
7626 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7627 {
7628 const REAL_VALUE_TYPE sqrt2_trunc
7629 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7630 return fold_build2_loc (loc, MULT_EXPR, type,
7631 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7632 build_real (type, sqrt2_trunc));
7633 }
7634
7635 return NULL_TREE;
7636 }
7637
7638
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  omit_one_operand keeps any side
     effects of Y in the result.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare for exact identity.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
7790
7791 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7792 Return NULL_TREE if no simplification can be made. */
7793 static tree
7794 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7795 tree arg0, tree arg1, tree type)
7796 {
7797 if (!validate_arg (arg0, REAL_TYPE)
7798 || !validate_arg (arg1, INTEGER_TYPE))
7799 return NULL_TREE;
7800
7801 /* Optimize pow(1.0,y) = 1.0. */
7802 if (real_onep (arg0))
7803 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7804
7805 if (host_integerp (arg1, 0))
7806 {
7807 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7808
7809 /* Evaluate powi at compile-time. */
7810 if (TREE_CODE (arg0) == REAL_CST
7811 && !TREE_OVERFLOW (arg0))
7812 {
7813 REAL_VALUE_TYPE x;
7814 x = TREE_REAL_CST (arg0);
7815 real_powi (&x, TYPE_MODE (type), &x, c);
7816 return build_real (type, x);
7817 }
7818
7819 /* Optimize pow(x,0) = 1.0. */
7820 if (c == 0)
7821 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7822 arg0);
7823
7824 /* Optimize pow(x,1) = x. */
7825 if (c == 1)
7826 return arg0;
7827
7828 /* Optimize pow(x,-1) = 1.0/x. */
7829 if (c == -1)
7830 return fold_build2_loc (loc, RDIV_EXPR, type,
7831 build_real (type, dconst1), arg0);
7832 }
7833
7834 return NULL_TREE;
7835 }
7836
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function; its identity is
   also used below to decide which log family is this builtin's
   inverse.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      /* The result type of the call being folded.  */
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  Only valid under unsafe math,
	 since it discards logN's domain/NaN behavior.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  /* Compare FUNC against the MPFR routines by address to match
	     exp with log, exp2 with log2 and exp10 with log10.  */
	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
7877
7878 /* Return true if VAR is a VAR_DECL or a component thereof. */
7879
7880 static bool
7881 var_decl_component_p (tree var)
7882 {
7883 tree inner = var;
7884 while (handled_component_p (inner))
7885 inner = TREE_OPERAND (inner, 0);
7886 return SSA_VAR_P (inner);
7887 }
7888
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  The fold replaces a
   memset of a whole scalar variable (or scalar component) with a
   single store of the replicated byte value C.  DEST, C and LEN are
   the call's arguments, TYPE its return type; IGNORE is true when the
   call's value is unused.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* The length must be a known non-negative constant.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* Need a constant fill byte, and DEST must be safe to re-evaluate.  */
  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of some object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array destination, look at the element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral and pointer stores are synthesized here.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover exactly one object of type ETYPE, and DEST
     must be sufficiently aligned for that access.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  /* The replicated value must fit in a host-wide integer.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* Byte replication below assumes 8-bit units and at most 64-bit
	 host-wide integers.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole value.  The last
	 step shifts by 31 then 1 (rather than 32) to stay defined when
	 HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build "*(etype *)dest = cval".  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The call's value is used: yield DEST, sequenced after the store.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
7967
/* Fold function call to builtin bzero with arguments DEST and SIZE.
   Return NULL_TREE if no simplification can be made.  IGNORE is true
   when the call's value is unused; since the fold goes through
   fold_builtin_memset with a different return type, it is only
   performed in that case.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
			      fold_convert_loc (loc, size_type_node, size),
			      void_type_node, ignore);
}
7990
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).
   TYPE is the call's return type and IGNORE is true when its value is
   unused.  When the copy covers exactly one object, the call folds to
   a scalar/aggregate assignment; a memmove whose operands provably do
   not overlap folds to memcpy.  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* memmove: try to prove non-overlap and downgrade to memcpy.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src);
	  dest_align = get_pointer_alignment (dest);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Decompose both addresses into base object + bit offset.  */
	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Use the copy length as the access extent when known.  */
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Two decls overlap only when they are the same decl
		     and the ranges intersect.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  /* Same-pointer MEM_REFs: fold the MEM_REF offsets into
		     the byte offsets, guarding against overflow.  */
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      /* From here on: try to turn the copy into a single assignment.
	 That needs a constant length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return NULL_TREE;
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* If the pointed-to type is an array that doesn't match LEN,
	 operate on its element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return NULL_TREE;

      /* The assignment must not require a misaligned access.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is also the return value below; protect it from double
	 expansion.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* Try to express the destination as a direct object access of
	 exactly LEN bytes.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      /* Likewise for the source.  */
      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side is a direct object access, read/write the
	 other side through a MEM_REF of the matching type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  /* memcpy/memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style: point at the last copied byte rather than past it.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8273
8274 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8275 If LEN is not NULL, it represents the length of the string to be
8276 copied. Return NULL_TREE if no simplification can be made. */
8277
8278 tree
8279 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8280 {
8281 tree fn;
8282
8283 if (!validate_arg (dest, POINTER_TYPE)
8284 || !validate_arg (src, POINTER_TYPE))
8285 return NULL_TREE;
8286
8287 /* If SRC and DEST are the same (and not volatile), return DEST. */
8288 if (operand_equal_p (src, dest, 0))
8289 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8290
8291 if (optimize_function_for_size_p (cfun))
8292 return NULL_TREE;
8293
8294 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8295 if (!fn)
8296 return NULL_TREE;
8297
8298 if (!len)
8299 {
8300 len = c_strlen (src, 1);
8301 if (! len || TREE_SIDE_EFFECTS (len))
8302 return NULL_TREE;
8303 }
8304
8305 len = fold_convert_loc (loc, size_type_node, len);
8306 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8307 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8308 build_call_expr_loc (loc, fn, 3, dest, src, len));
8309 }
8310
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  The fold turns
   stpcpy into a memcpy of LEN+1 bytes whose value is DEST + LEN, a
   pointer to the copied terminating NUL.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* SRC must have a compile-time constant string length.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* LEN + 1 bytes: include the terminating NUL in the copy.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* The result is DEST + LEN, evaluated after the memcpy call.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
8351
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  When the whole
   source string (including its NUL) fits in LEN, the call becomes a
   memcpy of LEN bytes.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Include the source's terminating NUL in the comparison below.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  len = fold_convert_loc (loc, size_type_node, len);
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8400
8401 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8402 arguments to the call, and TYPE is its return type.
8403 Return NULL_TREE if no simplification can be made. */
8404
8405 static tree
8406 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8407 {
8408 if (!validate_arg (arg1, POINTER_TYPE)
8409 || !validate_arg (arg2, INTEGER_TYPE)
8410 || !validate_arg (len, INTEGER_TYPE))
8411 return NULL_TREE;
8412 else
8413 {
8414 const char *p1;
8415
8416 if (TREE_CODE (arg2) != INTEGER_CST
8417 || !host_integerp (len, 1))
8418 return NULL_TREE;
8419
8420 p1 = c_getstr (arg1);
8421 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8422 {
8423 char c;
8424 const char *r;
8425 tree tem;
8426
8427 if (target_char_cast (arg2, &c))
8428 return NULL_TREE;
8429
8430 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8431
8432 if (r == NULL)
8433 return build_int_cst (TREE_TYPE (arg1), 0);
8434
8435 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8436 return fold_convert_loc (loc, type, tem);
8437 }
8438 return NULL_TREE;
8439 }
8440 }
8441
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  LEN is the
   number of bytes to compare; the result has int semantics (negative,
   zero, positive).  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Normalize the host memcmp result to -1/0/1.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8508
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  The result has
   int semantics (negative, zero, positive).  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate on the host, normalized to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8571
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  The result has
   int semantics (negative, zero, positive).  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length constant: evaluate on the host,
     normalized to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8666
8667 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8668 ARG. Return NULL_TREE if no simplification can be made. */
8669
8670 static tree
8671 fold_builtin_signbit (location_t loc, tree arg, tree type)
8672 {
8673 if (!validate_arg (arg, REAL_TYPE))
8674 return NULL_TREE;
8675
8676 /* If ARG is a compile-time constant, determine the result. */
8677 if (TREE_CODE (arg) == REAL_CST
8678 && !TREE_OVERFLOW (arg))
8679 {
8680 REAL_VALUE_TYPE c;
8681
8682 c = TREE_REAL_CST (arg);
8683 return (REAL_VALUE_NEGATIVE (c)
8684 ? build_one_cst (type)
8685 : build_zero_cst (type));
8686 }
8687
8688 /* If ARG is non-negative, the result is always zero. */
8689 if (tree_expr_nonnegative_p (arg))
8690 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8691
8692 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8693 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8694 return fold_convert (type,
8695 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8696 build_real (TREE_TYPE (arg), dconst0)));
8697
8698 return NULL_TREE;
8699 }
8700
8701 /* Fold function call to builtin copysign, copysignf or copysignl with
8702 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8703 be made. */
8704
8705 static tree
8706 fold_builtin_copysign (location_t loc, tree fndecl,
8707 tree arg1, tree arg2, tree type)
8708 {
8709 tree tem;
8710
8711 if (!validate_arg (arg1, REAL_TYPE)
8712 || !validate_arg (arg2, REAL_TYPE))
8713 return NULL_TREE;
8714
8715 /* copysign(X,X) is X. */
8716 if (operand_equal_p (arg1, arg2, 0))
8717 return fold_convert_loc (loc, type, arg1);
8718
8719 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8720 if (TREE_CODE (arg1) == REAL_CST
8721 && TREE_CODE (arg2) == REAL_CST
8722 && !TREE_OVERFLOW (arg1)
8723 && !TREE_OVERFLOW (arg2))
8724 {
8725 REAL_VALUE_TYPE c1, c2;
8726
8727 c1 = TREE_REAL_CST (arg1);
8728 c2 = TREE_REAL_CST (arg2);
8729 /* c1.sign := c2.sign. */
8730 real_copysign (&c1, &c2);
8731 return build_real (type, c1);
8732 }
8733
8734 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8735 Remember to evaluate Y for side-effects. */
8736 if (tree_expr_nonnegative_p (arg2))
8737 return omit_one_operand_loc (loc, type,
8738 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8739 arg2);
8740
8741 /* Strip sign changing operations for the first argument. */
8742 tem = fold_strip_sign_ops (arg1);
8743 if (tem)
8744 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8745
8746 return NULL_TREE;
8747 }
8748
8749 /* Fold a call to builtin isascii with argument ARG. */
8750
8751 static tree
8752 fold_builtin_isascii (location_t loc, tree arg)
8753 {
8754 if (!validate_arg (arg, INTEGER_TYPE))
8755 return NULL_TREE;
8756 else
8757 {
8758 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8759 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8760 build_int_cst (integer_type_node,
8761 ~ (unsigned HOST_WIDE_INT) 0x7f));
8762 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8763 arg, integer_zero_node);
8764 }
8765 }
8766
8767 /* Fold a call to builtin toascii with argument ARG. */
8768
8769 static tree
8770 fold_builtin_toascii (location_t loc, tree arg)
8771 {
8772 if (!validate_arg (arg, INTEGER_TYPE))
8773 return NULL_TREE;
8774
8775 /* Transform toascii(c) -> (c & 0x7f). */
8776 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8777 build_int_cst (integer_type_node, 0x7f));
8778 }
8779
8780 /* Fold a call to builtin isdigit with argument ARG. */
8781
8782 static tree
8783 fold_builtin_isdigit (location_t loc, tree arg)
8784 {
8785 if (!validate_arg (arg, INTEGER_TYPE))
8786 return NULL_TREE;
8787 else
8788 {
8789 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8790 /* According to the C standard, isdigit is unaffected by locale.
8791 However, it definitely is affected by the target character set. */
8792 unsigned HOST_WIDE_INT target_digit0
8793 = lang_hooks.to_target_charset ('0');
8794
8795 if (target_digit0 == 0)
8796 return NULL_TREE;
8797
8798 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8799 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8800 build_int_cst (unsigned_type_node, target_digit0));
8801 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8802 build_int_cst (unsigned_type_node, 9));
8803 }
8804 }
8805
8806 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8807
8808 static tree
8809 fold_builtin_fabs (location_t loc, tree arg, tree type)
8810 {
8811 if (!validate_arg (arg, REAL_TYPE))
8812 return NULL_TREE;
8813
8814 arg = fold_convert_loc (loc, type, arg);
8815 if (TREE_CODE (arg) == REAL_CST)
8816 return fold_abs_const (arg, type);
8817 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8818 }
8819
8820 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8821
8822 static tree
8823 fold_builtin_abs (location_t loc, tree arg, tree type)
8824 {
8825 if (!validate_arg (arg, INTEGER_TYPE))
8826 return NULL_TREE;
8827
8828 arg = fold_convert_loc (loc, type, arg);
8829 if (TREE_CODE (arg) == INTEGER_CST)
8830 return fold_abs_const (arg, type);
8831 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8832 }
8833
8834 /* Fold a fma operation with arguments ARG[012]. */
8835
8836 tree
8837 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8838 tree type, tree arg0, tree arg1, tree arg2)
8839 {
8840 if (TREE_CODE (arg0) == REAL_CST
8841 && TREE_CODE (arg1) == REAL_CST
8842 && TREE_CODE (arg2) == REAL_CST)
8843 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8844
8845 return NULL_TREE;
8846 }
8847
8848 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8849
8850 static tree
8851 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8852 {
8853 if (validate_arg (arg0, REAL_TYPE)
8854 && validate_arg(arg1, REAL_TYPE)
8855 && validate_arg(arg2, REAL_TYPE))
8856 {
8857 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8858 if (tem)
8859 return tem;
8860
8861 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8862 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8863 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8864 }
8865 return NULL_TREE;
8866 }
8867
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the call's
   operands, TYPE is its return type, and MAX selects fmax (true) or
   fmin (false).  Returns the folded tree, or NULL_TREE when no
   simplification applies.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME tolerates
	 equal pure calls, not just identical operands.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
8912
8913 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8914
8915 static tree
8916 fold_builtin_carg (location_t loc, tree arg, tree type)
8917 {
8918 if (validate_arg (arg, COMPLEX_TYPE)
8919 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8920 {
8921 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8922
8923 if (atan2_fn)
8924 {
8925 tree new_arg = builtin_save_expr (arg);
8926 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8927 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8928 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8929 }
8930 }
8931
8932 return NULL_TREE;
8933 }
8934
/* Fold a call to builtin logb/ilogb.  ARG is the real operand and
   RETTYPE the call's return type (real for logb, integer for ilogb).
   Only folds a constant ARG; returns NULL_TREE otherwise.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... ilogb of Inf/NaN is not folded, same as
	     the zero case below.  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
8976
/* Fold a call to builtin significand, if radix == 2.  ARG is the real
   operand and RETTYPE the call's return type.  Only folds a constant
   ARG; returns NULL_TREE otherwise.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9015
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the real operand, ARG1 the pointer whose pointee receives the
   exponent, and RETTYPE the call's return type.  Only folds when ARG0
   is a constant and ARG1 points to int; the result is a COMPOUND_EXPR
   that stores the exponent and yields the fraction.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant argument that did not overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9071
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  ARG0 is the
   real operand, ARG1 the integer exponent adjustment, and TYPE the
   call's return type.  Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For the non-ldexp functions this needs radix 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9132
9133 /* Fold a call to builtin modf. */
9134
9135 static tree
9136 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9137 {
9138 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9139 return NULL_TREE;
9140
9141 STRIP_NOPS (arg0);
9142
9143 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9144 return NULL_TREE;
9145
9146 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9147
9148 /* Proceed if a valid pointer type was passed in. */
9149 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9150 {
9151 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9152 REAL_VALUE_TYPE trunc, frac;
9153
9154 switch (value->cl)
9155 {
9156 case rvc_nan:
9157 case rvc_zero:
9158 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9159 trunc = frac = *value;
9160 break;
9161 case rvc_inf:
9162 /* For +-Inf, return (*arg1 = arg0, +-0). */
9163 frac = dconst0;
9164 frac.sign = value->sign;
9165 trunc = *value;
9166 break;
9167 case rvc_normal:
9168 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9169 real_trunc (&trunc, VOIDmode, value);
9170 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9171 /* If the original number was negative and already
9172 integral, then the fractional part is -0.0. */
9173 if (value->sign && frac.cl == rvc_zero)
9174 frac.sign = value->sign;
9175 break;
9176 }
9177
9178 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9179 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9180 build_real (rettype, trunc));
9181 TREE_SIDE_EFFECTS (arg1) = 1;
9182 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9183 build_real (rettype, frac));
9184 }
9185
9186 return NULL_TREE;
9187 }
9188
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Leave the expansion to RTL when an insn pattern exists.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the textual maximum finite value of MODE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* RMAX is the largest finite value, RMIN the smallest
	   normalized value (0x1p(emin-1)) of MODE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated only once for both calls.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9278
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   FNDECL is the call's FUNCTION_DECL, ARG is the argument for the
   call, and BUILTIN_INDEX selects which classification to fold.
   Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode, the answer is statically 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* Save ARG so both helper calls evaluate it only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 comparisons before
	       combining them.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and infinities, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the mode, the answer is statically 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) -> x unordered with itself; save ARG so it is
	 evaluated only once.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9371
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Classify on fabs(arg); save it so it is evaluated only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain is built innermost-first below, so each later
     test wraps the previously built result.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normalized value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* The Inf and NaN tests are only emitted when the mode honors
     those values at all.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9439
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick the common comparison type: the wider of two reals, or the
     real type when mixed with an integer.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(): if the mode has no NaNs the answer is
	 statically zero.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Build the opposite comparison (see the header comment) and negate
     it to obtain the requested predicate.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
		   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9489
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The boolean distinguishes inf (true) from huge_val (false);
	 its exact meaning is defined by fold_builtin_inf.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify NULL_TREE.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9518
9519 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9520 IGNORE is true if the result of the function call is ignored. This
9521 function returns NULL_TREE if no simplification was possible. */
9522
9523 static tree
9524 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9525 {
9526 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9527 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9528 switch (fcode)
9529 {
9530 case BUILT_IN_CONSTANT_P:
9531 {
9532 tree val = fold_builtin_constant_p (arg0);
9533
9534 /* Gimplification will pull the CALL_EXPR for the builtin out of
9535 an if condition. When not optimizing, we'll not CSE it back.
9536 To avoid link error types of regressions, return false now. */
9537 if (!val && !optimize)
9538 val = integer_zero_node;
9539
9540 return val;
9541 }
9542
9543 case BUILT_IN_CLASSIFY_TYPE:
9544 return fold_builtin_classify_type (arg0);
9545
9546 case BUILT_IN_STRLEN:
9547 return fold_builtin_strlen (loc, type, arg0);
9548
9549 CASE_FLT_FN (BUILT_IN_FABS):
9550 return fold_builtin_fabs (loc, arg0, type);
9551
9552 case BUILT_IN_ABS:
9553 case BUILT_IN_LABS:
9554 case BUILT_IN_LLABS:
9555 case BUILT_IN_IMAXABS:
9556 return fold_builtin_abs (loc, arg0, type);
9557
9558 CASE_FLT_FN (BUILT_IN_CONJ):
9559 if (validate_arg (arg0, COMPLEX_TYPE)
9560 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9561 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9562 break;
9563
9564 CASE_FLT_FN (BUILT_IN_CREAL):
9565 if (validate_arg (arg0, COMPLEX_TYPE)
9566 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9567 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9568 break;
9569
9570 CASE_FLT_FN (BUILT_IN_CIMAG):
9571 if (validate_arg (arg0, COMPLEX_TYPE)
9572 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9573 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9574 break;
9575
9576 CASE_FLT_FN (BUILT_IN_CCOS):
9577 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9578
9579 CASE_FLT_FN (BUILT_IN_CCOSH):
9580 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9581
9582 CASE_FLT_FN (BUILT_IN_CPROJ):
9583 return fold_builtin_cproj(loc, arg0, type);
9584
9585 CASE_FLT_FN (BUILT_IN_CSIN):
9586 if (validate_arg (arg0, COMPLEX_TYPE)
9587 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9588 return do_mpc_arg1 (arg0, type, mpc_sin);
9589 break;
9590
9591 CASE_FLT_FN (BUILT_IN_CSINH):
9592 if (validate_arg (arg0, COMPLEX_TYPE)
9593 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9594 return do_mpc_arg1 (arg0, type, mpc_sinh);
9595 break;
9596
9597 CASE_FLT_FN (BUILT_IN_CTAN):
9598 if (validate_arg (arg0, COMPLEX_TYPE)
9599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9600 return do_mpc_arg1 (arg0, type, mpc_tan);
9601 break;
9602
9603 CASE_FLT_FN (BUILT_IN_CTANH):
9604 if (validate_arg (arg0, COMPLEX_TYPE)
9605 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9606 return do_mpc_arg1 (arg0, type, mpc_tanh);
9607 break;
9608
9609 CASE_FLT_FN (BUILT_IN_CLOG):
9610 if (validate_arg (arg0, COMPLEX_TYPE)
9611 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9612 return do_mpc_arg1 (arg0, type, mpc_log);
9613 break;
9614
9615 CASE_FLT_FN (BUILT_IN_CSQRT):
9616 if (validate_arg (arg0, COMPLEX_TYPE)
9617 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9618 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9619 break;
9620
9621 CASE_FLT_FN (BUILT_IN_CASIN):
9622 if (validate_arg (arg0, COMPLEX_TYPE)
9623 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9624 return do_mpc_arg1 (arg0, type, mpc_asin);
9625 break;
9626
9627 CASE_FLT_FN (BUILT_IN_CACOS):
9628 if (validate_arg (arg0, COMPLEX_TYPE)
9629 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9630 return do_mpc_arg1 (arg0, type, mpc_acos);
9631 break;
9632
9633 CASE_FLT_FN (BUILT_IN_CATAN):
9634 if (validate_arg (arg0, COMPLEX_TYPE)
9635 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9636 return do_mpc_arg1 (arg0, type, mpc_atan);
9637 break;
9638
9639 CASE_FLT_FN (BUILT_IN_CASINH):
9640 if (validate_arg (arg0, COMPLEX_TYPE)
9641 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9642 return do_mpc_arg1 (arg0, type, mpc_asinh);
9643 break;
9644
9645 CASE_FLT_FN (BUILT_IN_CACOSH):
9646 if (validate_arg (arg0, COMPLEX_TYPE)
9647 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9648 return do_mpc_arg1 (arg0, type, mpc_acosh);
9649 break;
9650
9651 CASE_FLT_FN (BUILT_IN_CATANH):
9652 if (validate_arg (arg0, COMPLEX_TYPE)
9653 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9654 return do_mpc_arg1 (arg0, type, mpc_atanh);
9655 break;
9656
9657 CASE_FLT_FN (BUILT_IN_CABS):
9658 return fold_builtin_cabs (loc, arg0, type, fndecl);
9659
9660 CASE_FLT_FN (BUILT_IN_CARG):
9661 return fold_builtin_carg (loc, arg0, type);
9662
9663 CASE_FLT_FN (BUILT_IN_SQRT):
9664 return fold_builtin_sqrt (loc, arg0, type);
9665
9666 CASE_FLT_FN (BUILT_IN_CBRT):
9667 return fold_builtin_cbrt (loc, arg0, type);
9668
9669 CASE_FLT_FN (BUILT_IN_ASIN):
9670 if (validate_arg (arg0, REAL_TYPE))
9671 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9672 &dconstm1, &dconst1, true);
9673 break;
9674
9675 CASE_FLT_FN (BUILT_IN_ACOS):
9676 if (validate_arg (arg0, REAL_TYPE))
9677 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9678 &dconstm1, &dconst1, true);
9679 break;
9680
9681 CASE_FLT_FN (BUILT_IN_ATAN):
9682 if (validate_arg (arg0, REAL_TYPE))
9683 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9684 break;
9685
9686 CASE_FLT_FN (BUILT_IN_ASINH):
9687 if (validate_arg (arg0, REAL_TYPE))
9688 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9689 break;
9690
9691 CASE_FLT_FN (BUILT_IN_ACOSH):
9692 if (validate_arg (arg0, REAL_TYPE))
9693 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9694 &dconst1, NULL, true);
9695 break;
9696
9697 CASE_FLT_FN (BUILT_IN_ATANH):
9698 if (validate_arg (arg0, REAL_TYPE))
9699 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9700 &dconstm1, &dconst1, false);
9701 break;
9702
9703 CASE_FLT_FN (BUILT_IN_SIN):
9704 if (validate_arg (arg0, REAL_TYPE))
9705 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9706 break;
9707
9708 CASE_FLT_FN (BUILT_IN_COS):
9709 return fold_builtin_cos (loc, arg0, type, fndecl);
9710
9711 CASE_FLT_FN (BUILT_IN_TAN):
9712 return fold_builtin_tan (arg0, type);
9713
9714 CASE_FLT_FN (BUILT_IN_CEXP):
9715 return fold_builtin_cexp (loc, arg0, type);
9716
9717 CASE_FLT_FN (BUILT_IN_CEXPI):
9718 if (validate_arg (arg0, REAL_TYPE))
9719 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9720 break;
9721
9722 CASE_FLT_FN (BUILT_IN_SINH):
9723 if (validate_arg (arg0, REAL_TYPE))
9724 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9725 break;
9726
9727 CASE_FLT_FN (BUILT_IN_COSH):
9728 return fold_builtin_cosh (loc, arg0, type, fndecl);
9729
9730 CASE_FLT_FN (BUILT_IN_TANH):
9731 if (validate_arg (arg0, REAL_TYPE))
9732 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9733 break;
9734
9735 CASE_FLT_FN (BUILT_IN_ERF):
9736 if (validate_arg (arg0, REAL_TYPE))
9737 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9738 break;
9739
9740 CASE_FLT_FN (BUILT_IN_ERFC):
9741 if (validate_arg (arg0, REAL_TYPE))
9742 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9743 break;
9744
9745 CASE_FLT_FN (BUILT_IN_TGAMMA):
9746 if (validate_arg (arg0, REAL_TYPE))
9747 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9748 break;
9749
9750 CASE_FLT_FN (BUILT_IN_EXP):
9751 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9752
9753 CASE_FLT_FN (BUILT_IN_EXP2):
9754 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9755
9756 CASE_FLT_FN (BUILT_IN_EXP10):
9757 CASE_FLT_FN (BUILT_IN_POW10):
9758 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9759
9760 CASE_FLT_FN (BUILT_IN_EXPM1):
9761 if (validate_arg (arg0, REAL_TYPE))
9762 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9763 break;
9764
9765 CASE_FLT_FN (BUILT_IN_LOG):
9766 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9767
9768 CASE_FLT_FN (BUILT_IN_LOG2):
9769 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9770
9771 CASE_FLT_FN (BUILT_IN_LOG10):
9772 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9773
9774 CASE_FLT_FN (BUILT_IN_LOG1P):
9775 if (validate_arg (arg0, REAL_TYPE))
9776 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9777 &dconstm1, NULL, false);
9778 break;
9779
9780 CASE_FLT_FN (BUILT_IN_J0):
9781 if (validate_arg (arg0, REAL_TYPE))
9782 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9783 NULL, NULL, 0);
9784 break;
9785
9786 CASE_FLT_FN (BUILT_IN_J1):
9787 if (validate_arg (arg0, REAL_TYPE))
9788 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9789 NULL, NULL, 0);
9790 break;
9791
9792 CASE_FLT_FN (BUILT_IN_Y0):
9793 if (validate_arg (arg0, REAL_TYPE))
9794 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9795 &dconst0, NULL, false);
9796 break;
9797
9798 CASE_FLT_FN (BUILT_IN_Y1):
9799 if (validate_arg (arg0, REAL_TYPE))
9800 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9801 &dconst0, NULL, false);
9802 break;
9803
9804 CASE_FLT_FN (BUILT_IN_NAN):
9805 case BUILT_IN_NAND32:
9806 case BUILT_IN_NAND64:
9807 case BUILT_IN_NAND128:
9808 return fold_builtin_nan (arg0, type, true);
9809
9810 CASE_FLT_FN (BUILT_IN_NANS):
9811 return fold_builtin_nan (arg0, type, false);
9812
9813 CASE_FLT_FN (BUILT_IN_FLOOR):
9814 return fold_builtin_floor (loc, fndecl, arg0);
9815
9816 CASE_FLT_FN (BUILT_IN_CEIL):
9817 return fold_builtin_ceil (loc, fndecl, arg0);
9818
9819 CASE_FLT_FN (BUILT_IN_TRUNC):
9820 return fold_builtin_trunc (loc, fndecl, arg0);
9821
9822 CASE_FLT_FN (BUILT_IN_ROUND):
9823 return fold_builtin_round (loc, fndecl, arg0);
9824
9825 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9826 CASE_FLT_FN (BUILT_IN_RINT):
9827 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9828
9829 CASE_FLT_FN (BUILT_IN_ICEIL):
9830 CASE_FLT_FN (BUILT_IN_LCEIL):
9831 CASE_FLT_FN (BUILT_IN_LLCEIL):
9832 CASE_FLT_FN (BUILT_IN_LFLOOR):
9833 CASE_FLT_FN (BUILT_IN_IFLOOR):
9834 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9835 CASE_FLT_FN (BUILT_IN_IROUND):
9836 CASE_FLT_FN (BUILT_IN_LROUND):
9837 CASE_FLT_FN (BUILT_IN_LLROUND):
9838 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9839
9840 CASE_FLT_FN (BUILT_IN_IRINT):
9841 CASE_FLT_FN (BUILT_IN_LRINT):
9842 CASE_FLT_FN (BUILT_IN_LLRINT):
9843 return fold_fixed_mathfn (loc, fndecl, arg0);
9844
9845 case BUILT_IN_BSWAP32:
9846 case BUILT_IN_BSWAP64:
9847 return fold_builtin_bswap (fndecl, arg0);
9848
9849 CASE_INT_FN (BUILT_IN_FFS):
9850 CASE_INT_FN (BUILT_IN_CLZ):
9851 CASE_INT_FN (BUILT_IN_CTZ):
9852 CASE_INT_FN (BUILT_IN_CLRSB):
9853 CASE_INT_FN (BUILT_IN_POPCOUNT):
9854 CASE_INT_FN (BUILT_IN_PARITY):
9855 return fold_builtin_bitop (fndecl, arg0);
9856
9857 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9858 return fold_builtin_signbit (loc, arg0, type);
9859
9860 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9861 return fold_builtin_significand (loc, arg0, type);
9862
9863 CASE_FLT_FN (BUILT_IN_ILOGB):
9864 CASE_FLT_FN (BUILT_IN_LOGB):
9865 return fold_builtin_logb (loc, arg0, type);
9866
9867 case BUILT_IN_ISASCII:
9868 return fold_builtin_isascii (loc, arg0);
9869
9870 case BUILT_IN_TOASCII:
9871 return fold_builtin_toascii (loc, arg0);
9872
9873 case BUILT_IN_ISDIGIT:
9874 return fold_builtin_isdigit (loc, arg0);
9875
9876 CASE_FLT_FN (BUILT_IN_FINITE):
9877 case BUILT_IN_FINITED32:
9878 case BUILT_IN_FINITED64:
9879 case BUILT_IN_FINITED128:
9880 case BUILT_IN_ISFINITE:
9881 {
9882 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9883 if (ret)
9884 return ret;
9885 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9886 }
9887
9888 CASE_FLT_FN (BUILT_IN_ISINF):
9889 case BUILT_IN_ISINFD32:
9890 case BUILT_IN_ISINFD64:
9891 case BUILT_IN_ISINFD128:
9892 {
9893 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9894 if (ret)
9895 return ret;
9896 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9897 }
9898
9899 case BUILT_IN_ISNORMAL:
9900 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9901
9902 case BUILT_IN_ISINF_SIGN:
9903 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9904
9905 CASE_FLT_FN (BUILT_IN_ISNAN):
9906 case BUILT_IN_ISNAND32:
9907 case BUILT_IN_ISNAND64:
9908 case BUILT_IN_ISNAND128:
9909 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9910
9911 case BUILT_IN_PRINTF:
9912 case BUILT_IN_PRINTF_UNLOCKED:
9913 case BUILT_IN_VPRINTF:
9914 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9915
9916 case BUILT_IN_FREE:
9917 if (integer_zerop (arg0))
9918 return build_empty_stmt (loc);
9919 break;
9920
9921 default:
9922 break;
9923 }
9924
9925 return NULL_TREE;
9926
9927 }
9928
9929 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9930 IGNORE is true if the result of the function call is ignored. This
9931 function returns NULL_TREE if no simplification was possible. */
9932
9933 static tree
9934 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9935 {
9936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9937 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9938
9939 switch (fcode)
9940 {
9941 CASE_FLT_FN (BUILT_IN_JN):
9942 if (validate_arg (arg0, INTEGER_TYPE)
9943 && validate_arg (arg1, REAL_TYPE))
9944 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9945 break;
9946
9947 CASE_FLT_FN (BUILT_IN_YN):
9948 if (validate_arg (arg0, INTEGER_TYPE)
9949 && validate_arg (arg1, REAL_TYPE))
9950 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9951 &dconst0, false);
9952 break;
9953
9954 CASE_FLT_FN (BUILT_IN_DREM):
9955 CASE_FLT_FN (BUILT_IN_REMAINDER):
9956 if (validate_arg (arg0, REAL_TYPE)
9957 && validate_arg(arg1, REAL_TYPE))
9958 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9959 break;
9960
9961 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9962 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9963 if (validate_arg (arg0, REAL_TYPE)
9964 && validate_arg(arg1, POINTER_TYPE))
9965 return do_mpfr_lgamma_r (arg0, arg1, type);
9966 break;
9967
9968 CASE_FLT_FN (BUILT_IN_ATAN2):
9969 if (validate_arg (arg0, REAL_TYPE)
9970 && validate_arg(arg1, REAL_TYPE))
9971 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9972 break;
9973
9974 CASE_FLT_FN (BUILT_IN_FDIM):
9975 if (validate_arg (arg0, REAL_TYPE)
9976 && validate_arg(arg1, REAL_TYPE))
9977 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9978 break;
9979
9980 CASE_FLT_FN (BUILT_IN_HYPOT):
9981 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9982
9983 CASE_FLT_FN (BUILT_IN_CPOW):
9984 if (validate_arg (arg0, COMPLEX_TYPE)
9985 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9986 && validate_arg (arg1, COMPLEX_TYPE)
9987 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9988 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9989 break;
9990
9991 CASE_FLT_FN (BUILT_IN_LDEXP):
9992 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9993 CASE_FLT_FN (BUILT_IN_SCALBN):
9994 CASE_FLT_FN (BUILT_IN_SCALBLN):
9995 return fold_builtin_load_exponent (loc, arg0, arg1,
9996 type, /*ldexp=*/false);
9997
9998 CASE_FLT_FN (BUILT_IN_FREXP):
9999 return fold_builtin_frexp (loc, arg0, arg1, type);
10000
10001 CASE_FLT_FN (BUILT_IN_MODF):
10002 return fold_builtin_modf (loc, arg0, arg1, type);
10003
10004 case BUILT_IN_BZERO:
10005 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10006
10007 case BUILT_IN_FPUTS:
10008 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10009
10010 case BUILT_IN_FPUTS_UNLOCKED:
10011 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10012
10013 case BUILT_IN_STRSTR:
10014 return fold_builtin_strstr (loc, arg0, arg1, type);
10015
10016 case BUILT_IN_STRCAT:
10017 return fold_builtin_strcat (loc, arg0, arg1);
10018
10019 case BUILT_IN_STRSPN:
10020 return fold_builtin_strspn (loc, arg0, arg1);
10021
10022 case BUILT_IN_STRCSPN:
10023 return fold_builtin_strcspn (loc, arg0, arg1);
10024
10025 case BUILT_IN_STRCHR:
10026 case BUILT_IN_INDEX:
10027 return fold_builtin_strchr (loc, arg0, arg1, type);
10028
10029 case BUILT_IN_STRRCHR:
10030 case BUILT_IN_RINDEX:
10031 return fold_builtin_strrchr (loc, arg0, arg1, type);
10032
10033 case BUILT_IN_STRCPY:
10034 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10035
10036 case BUILT_IN_STPCPY:
10037 if (ignore)
10038 {
10039 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10040 if (!fn)
10041 break;
10042
10043 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10044 }
10045 else
10046 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10047 break;
10048
10049 case BUILT_IN_STRCMP:
10050 return fold_builtin_strcmp (loc, arg0, arg1);
10051
10052 case BUILT_IN_STRPBRK:
10053 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10054
10055 case BUILT_IN_EXPECT:
10056 return fold_builtin_expect (loc, arg0, arg1);
10057
10058 CASE_FLT_FN (BUILT_IN_POW):
10059 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10060
10061 CASE_FLT_FN (BUILT_IN_POWI):
10062 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10063
10064 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10065 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10066
10067 CASE_FLT_FN (BUILT_IN_FMIN):
10068 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10069
10070 CASE_FLT_FN (BUILT_IN_FMAX):
10071 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10072
10073 case BUILT_IN_ISGREATER:
10074 return fold_builtin_unordered_cmp (loc, fndecl,
10075 arg0, arg1, UNLE_EXPR, LE_EXPR);
10076 case BUILT_IN_ISGREATEREQUAL:
10077 return fold_builtin_unordered_cmp (loc, fndecl,
10078 arg0, arg1, UNLT_EXPR, LT_EXPR);
10079 case BUILT_IN_ISLESS:
10080 return fold_builtin_unordered_cmp (loc, fndecl,
10081 arg0, arg1, UNGE_EXPR, GE_EXPR);
10082 case BUILT_IN_ISLESSEQUAL:
10083 return fold_builtin_unordered_cmp (loc, fndecl,
10084 arg0, arg1, UNGT_EXPR, GT_EXPR);
10085 case BUILT_IN_ISLESSGREATER:
10086 return fold_builtin_unordered_cmp (loc, fndecl,
10087 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10088 case BUILT_IN_ISUNORDERED:
10089 return fold_builtin_unordered_cmp (loc, fndecl,
10090 arg0, arg1, UNORDERED_EXPR,
10091 NOP_EXPR);
10092
10093 /* We do the folding for va_start in the expander. */
10094 case BUILT_IN_VA_START:
10095 break;
10096
10097 case BUILT_IN_SPRINTF:
10098 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10099
10100 case BUILT_IN_OBJECT_SIZE:
10101 return fold_builtin_object_size (arg0, arg1);
10102
10103 case BUILT_IN_PRINTF:
10104 case BUILT_IN_PRINTF_UNLOCKED:
10105 case BUILT_IN_VPRINTF:
10106 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10107
10108 case BUILT_IN_PRINTF_CHK:
10109 case BUILT_IN_VPRINTF_CHK:
10110 if (!validate_arg (arg0, INTEGER_TYPE)
10111 || TREE_SIDE_EFFECTS (arg0))
10112 return NULL_TREE;
10113 else
10114 return fold_builtin_printf (loc, fndecl,
10115 arg1, NULL_TREE, ignore, fcode);
10116 break;
10117
10118 case BUILT_IN_FPRINTF:
10119 case BUILT_IN_FPRINTF_UNLOCKED:
10120 case BUILT_IN_VFPRINTF:
10121 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10122 ignore, fcode);
10123
10124 default:
10125 break;
10126 }
10127 return NULL_TREE;
10128 }
10129
10130 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10131 and ARG2. IGNORE is true if the result of the function call is ignored.
10132 This function returns NULL_TREE if no simplification was possible. */
10133
10134 static tree
10135 fold_builtin_3 (location_t loc, tree fndecl,
10136 tree arg0, tree arg1, tree arg2, bool ignore)
10137 {
10138 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10139 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10140 switch (fcode)
10141 {
10142
10143 CASE_FLT_FN (BUILT_IN_SINCOS):
10144 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10145
10146 CASE_FLT_FN (BUILT_IN_FMA):
10147 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10148 break;
10149
10150 CASE_FLT_FN (BUILT_IN_REMQUO):
10151 if (validate_arg (arg0, REAL_TYPE)
10152 && validate_arg(arg1, REAL_TYPE)
10153 && validate_arg(arg2, POINTER_TYPE))
10154 return do_mpfr_remquo (arg0, arg1, arg2);
10155 break;
10156
10157 case BUILT_IN_MEMSET:
10158 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10159
10160 case BUILT_IN_BCOPY:
10161 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10162 void_type_node, true, /*endp=*/3);
10163
10164 case BUILT_IN_MEMCPY:
10165 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10166 type, ignore, /*endp=*/0);
10167
10168 case BUILT_IN_MEMPCPY:
10169 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10170 type, ignore, /*endp=*/1);
10171
10172 case BUILT_IN_MEMMOVE:
10173 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10174 type, ignore, /*endp=*/3);
10175
10176 case BUILT_IN_STRNCAT:
10177 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10178
10179 case BUILT_IN_STRNCPY:
10180 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10181
10182 case BUILT_IN_STRNCMP:
10183 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10184
10185 case BUILT_IN_MEMCHR:
10186 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10187
10188 case BUILT_IN_BCMP:
10189 case BUILT_IN_MEMCMP:
10190 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10191
10192 case BUILT_IN_SPRINTF:
10193 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10194
10195 case BUILT_IN_SNPRINTF:
10196 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10197
10198 case BUILT_IN_STRCPY_CHK:
10199 case BUILT_IN_STPCPY_CHK:
10200 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10201 ignore, fcode);
10202
10203 case BUILT_IN_STRCAT_CHK:
10204 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10205
10206 case BUILT_IN_PRINTF_CHK:
10207 case BUILT_IN_VPRINTF_CHK:
10208 if (!validate_arg (arg0, INTEGER_TYPE)
10209 || TREE_SIDE_EFFECTS (arg0))
10210 return NULL_TREE;
10211 else
10212 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10213 break;
10214
10215 case BUILT_IN_FPRINTF:
10216 case BUILT_IN_FPRINTF_UNLOCKED:
10217 case BUILT_IN_VFPRINTF:
10218 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10219 ignore, fcode);
10220
10221 case BUILT_IN_FPRINTF_CHK:
10222 case BUILT_IN_VFPRINTF_CHK:
10223 if (!validate_arg (arg1, INTEGER_TYPE)
10224 || TREE_SIDE_EFFECTS (arg1))
10225 return NULL_TREE;
10226 else
10227 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10228 ignore, fcode);
10229
10230 default:
10231 break;
10232 }
10233 return NULL_TREE;
10234 }
10235
10236 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10237 ARG2, and ARG3. IGNORE is true if the result of the function call is
10238 ignored. This function returns NULL_TREE if no simplification was
10239 possible. */
10240
10241 static tree
10242 fold_builtin_4 (location_t loc, tree fndecl,
10243 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10244 {
10245 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10246
10247 switch (fcode)
10248 {
10249 case BUILT_IN_MEMCPY_CHK:
10250 case BUILT_IN_MEMPCPY_CHK:
10251 case BUILT_IN_MEMMOVE_CHK:
10252 case BUILT_IN_MEMSET_CHK:
10253 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10254 NULL_TREE, ignore,
10255 DECL_FUNCTION_CODE (fndecl));
10256
10257 case BUILT_IN_STRNCPY_CHK:
10258 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10259
10260 case BUILT_IN_STRNCAT_CHK:
10261 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10262
10263 case BUILT_IN_SNPRINTF:
10264 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10265
10266 case BUILT_IN_FPRINTF_CHK:
10267 case BUILT_IN_VFPRINTF_CHK:
10268 if (!validate_arg (arg1, INTEGER_TYPE)
10269 || TREE_SIDE_EFFECTS (arg1))
10270 return NULL_TREE;
10271 else
10272 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10273 ignore, fcode);
10274 break;
10275
10276 default:
10277 break;
10278 }
10279 return NULL_TREE;
10280 }
10281
10282 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10283 arguments, where NARGS <= 4. IGNORE is true if the result of the
10284 function call is ignored. This function returns NULL_TREE if no
10285 simplification was possible. Note that this only folds builtins with
10286 fixed argument patterns. Foldings that do varargs-to-varargs
10287 transformations, or that match calls with more than 4 arguments,
10288 need to be handled with fold_builtin_varargs instead. */
10289
10290 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10291
10292 static tree
10293 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10294 {
10295 tree ret = NULL_TREE;
10296
10297 switch (nargs)
10298 {
10299 case 0:
10300 ret = fold_builtin_0 (loc, fndecl, ignore);
10301 break;
10302 case 1:
10303 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10304 break;
10305 case 2:
10306 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10307 break;
10308 case 3:
10309 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10310 break;
10311 case 4:
10312 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10313 ignore);
10314 break;
10315 default:
10316 break;
10317 }
10318 if (ret)
10319 {
10320 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10321 SET_EXPR_LOCATION (ret, loc);
10322 TREE_NO_WARNING (ret) = 1;
10323 return ret;
10324 }
10325 return NULL_TREE;
10326 }
10327
10328 /* Builtins with folding operations that operate on "..." arguments
10329 need special handling; we need to store the arguments in a convenient
10330 data structure before attempting any folding. Fortunately there are
10331 only a few builtins that fall into this category. FNDECL is the
10332 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10333 result of the function call is ignored. */
10334
10335 static tree
10336 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10337 bool ignore ATTRIBUTE_UNUSED)
10338 {
10339 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10340 tree ret = NULL_TREE;
10341
10342 switch (fcode)
10343 {
10344 case BUILT_IN_SPRINTF_CHK:
10345 case BUILT_IN_VSPRINTF_CHK:
10346 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10347 break;
10348
10349 case BUILT_IN_SNPRINTF_CHK:
10350 case BUILT_IN_VSNPRINTF_CHK:
10351 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10352 break;
10353
10354 case BUILT_IN_FPCLASSIFY:
10355 ret = fold_builtin_fpclassify (loc, exp);
10356 break;
10357
10358 default:
10359 break;
10360 }
10361 if (ret)
10362 {
10363 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10364 SET_EXPR_LOCATION (ret, loc);
10365 TREE_NO_WARNING (ret) = 1;
10366 return ret;
10367 }
10368 return NULL_TREE;
10369 }
10370
10371 /* Return true if FNDECL shouldn't be folded right now.
10372 If a built-in function has an inline attribute always_inline
10373 wrapper, defer folding it after always_inline functions have
10374 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10375 might not be performed. */
10376
10377 bool
10378 avoid_folding_inline_builtin (tree fndecl)
10379 {
10380 return (DECL_DECLARED_INLINE_P (fndecl)
10381 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10382 && cfun
10383 && !cfun->always_inline_functions_inlined
10384 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10385 }
10386
10387 /* A wrapper function for builtin folding that prevents warnings for
10388 "statement without effect" and the like, caused by removing the
10389 call node earlier than the warning is generated. */
10390
10391 tree
10392 fold_call_expr (location_t loc, tree exp, bool ignore)
10393 {
10394 tree ret = NULL_TREE;
10395 tree fndecl = get_callee_fndecl (exp);
10396 if (fndecl
10397 && TREE_CODE (fndecl) == FUNCTION_DECL
10398 && DECL_BUILT_IN (fndecl)
10399 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10400 yet. Defer folding until we see all the arguments
10401 (after inlining). */
10402 && !CALL_EXPR_VA_ARG_PACK (exp))
10403 {
10404 int nargs = call_expr_nargs (exp);
10405
10406 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10407 instead last argument is __builtin_va_arg_pack (). Defer folding
10408 even in that case, until arguments are finalized. */
10409 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10410 {
10411 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10412 if (fndecl2
10413 && TREE_CODE (fndecl2) == FUNCTION_DECL
10414 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10415 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10416 return NULL_TREE;
10417 }
10418
10419 if (avoid_folding_inline_builtin (fndecl))
10420 return NULL_TREE;
10421
10422 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10423 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10424 CALL_EXPR_ARGP (exp), ignore);
10425 else
10426 {
10427 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10428 {
10429 tree *args = CALL_EXPR_ARGP (exp);
10430 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10431 }
10432 if (!ret)
10433 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10434 if (ret)
10435 return ret;
10436 }
10437 }
10438 return NULL_TREE;
10439 }
10440
10441 /* Conveniently construct a function call expression. FNDECL names the
10442 function to be called and N arguments are passed in the array
10443 ARGARRAY. */
10444
10445 tree
10446 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10447 {
10448 tree fntype = TREE_TYPE (fndecl);
10449 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10450
10451 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10452 }
10453
10454 /* Conveniently construct a function call expression. FNDECL names the
10455 function to be called and the arguments are passed in the vector
10456 VEC. */
10457
10458 tree
10459 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10460 {
10461 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10462 VEC_address (tree, vec));
10463 }
10464
10465
10466 /* Conveniently construct a function call expression. FNDECL names the
10467 function to be called, N is the number of arguments, and the "..."
10468 parameters are the argument expressions. */
10469
10470 tree
10471 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10472 {
10473 va_list ap;
10474 tree *argarray = XALLOCAVEC (tree, n);
10475 int i;
10476
10477 va_start (ap, n);
10478 for (i = 0; i < n; i++)
10479 argarray[i] = va_arg (ap, tree);
10480 va_end (ap);
10481 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10482 }
10483
10484 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10485 varargs macros aren't supported by all bootstrap compilers. */
10486
10487 tree
10488 build_call_expr (tree fndecl, int n, ...)
10489 {
10490 va_list ap;
10491 tree *argarray = XALLOCAVEC (tree, n);
10492 int i;
10493
10494 va_start (ap, n);
10495 for (i = 0; i < n; i++)
10496 argarray[i] = va_arg (ap, tree);
10497 va_end (ap);
10498 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10499 }
10500
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Attempts to fold the
   call when FN is the address of a builtin; if no folding applies, a
   plain CALL_EXPR is returned.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Folding must wait until always_inline wrappers are inlined.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  /* Machine-dependent builtins fold via the target hook; no
	     generic varargs folding applies, so return immediately.  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a builtin call: just build the CALL_EXPR.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
10558
10559 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10560 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10561 of arguments in ARGS to be omitted. OLDNARGS is the number of
10562 elements in ARGS. */
10563
10564 static tree
10565 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10566 int skip, tree fndecl, int n, va_list newargs)
10567 {
10568 int nargs = oldnargs - skip + n;
10569 tree *buffer;
10570
10571 if (n > 0)
10572 {
10573 int i, j;
10574
10575 buffer = XALLOCAVEC (tree, nargs);
10576 for (i = 0; i < n; i++)
10577 buffer[i] = va_arg (newargs, tree);
10578 for (j = skip; j < oldnargs; j++, i++)
10579 buffer[i] = args[j];
10580 }
10581 else
10582 buffer = args + skip;
10583
10584 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10585 }
10586
10587 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10588 list ARGS along with N new arguments specified as the "..."
10589 parameters. SKIP is the number of arguments in ARGS to be omitted.
10590 OLDNARGS is the number of elements in ARGS. */
10591
10592 static tree
10593 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10594 int skip, tree fndecl, int n, ...)
10595 {
10596 va_list ap;
10597 tree t;
10598
10599 va_start (ap, n);
10600 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10601 va_end (ap);
10602
10603 return t;
10604 }
10605
10606 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10607 along with N new arguments specified as the "..." parameters. SKIP
10608 is the number of arguments in EXP to be omitted. This function is used
10609 to do varargs-to-varargs transformations. */
10610
10611 static tree
10612 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10613 {
10614 va_list ap;
10615 tree t;
10616
10617 va_start (ap, n);
10618 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10619 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10620 va_end (ap);
10621
10622 return t;
10623 }
10624
10625 /* Validate a single argument ARG against a tree code CODE representing
10626 a type. */
10627
10628 static bool
10629 validate_arg (const_tree arg, enum tree_code code)
10630 {
10631 if (!arg)
10632 return false;
10633 else if (code == POINTER_TYPE)
10634 return POINTER_TYPE_P (TREE_TYPE (arg));
10635 else if (code == INTEGER_TYPE)
10636 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10637 return code == TREE_CODE (TREE_TYPE (arg));
10638 }
10639
10640 /* This function validates the types of a function call argument list
10641 against a specified list of tree_codes. If the last specifier is a 0,
10642 that represents an ellipses, otherwise the last specifier must be a
10643 VOID_TYPE.
10644
10645 This is the GIMPLE version of validate_arglist. Eventually we want to
10646 completely convert builtins.c to work from GIMPLEs and the tree based
10647 validate_arglist will then be removed. */
10648
10649 bool
10650 validate_gimple_arglist (const_gimple call, ...)
10651 {
10652 enum tree_code code;
10653 bool res = 0;
10654 va_list ap;
10655 const_tree arg;
10656 size_t i;
10657
10658 va_start (ap, call);
10659 i = 0;
10660
10661 do
10662 {
10663 code = (enum tree_code) va_arg (ap, int);
10664 switch (code)
10665 {
10666 case 0:
10667 /* This signifies an ellipses, any further arguments are all ok. */
10668 res = true;
10669 goto end;
10670 case VOID_TYPE:
10671 /* This signifies an endlink, if no arguments remain, return
10672 true, otherwise return false. */
10673 res = (i == gimple_call_num_args (call));
10674 goto end;
10675 default:
10676 /* If no parameters remain or the parameter's code does not
10677 match the specified code, return false. Otherwise continue
10678 checking any remaining arguments. */
10679 arg = gimple_call_arg (call, i++);
10680 if (!validate_arg (arg, code))
10681 goto end;
10682 break;
10683 }
10684 }
10685 while (1);
10686
10687 /* We need gotos here since we can only have one VA_CLOSE in a
10688 function. */
10689 end: ;
10690 va_end (ap);
10691
10692 return res;
10693 }
10694
10695 /* This function validates the types of a function call argument list
10696 against a specified list of tree_codes. If the last specifier is a 0,
10697 that represents an ellipses, otherwise the last specifier must be a
10698 VOID_TYPE. */
10699
10700 bool
10701 validate_arglist (const_tree callexpr, ...)
10702 {
10703 enum tree_code code;
10704 bool res = 0;
10705 va_list ap;
10706 const_call_expr_arg_iterator iter;
10707 const_tree arg;
10708
10709 va_start (ap, callexpr);
10710 init_const_call_expr_arg_iterator (callexpr, &iter);
10711
10712 do
10713 {
10714 code = (enum tree_code) va_arg (ap, int);
10715 switch (code)
10716 {
10717 case 0:
10718 /* This signifies an ellipses, any further arguments are all ok. */
10719 res = true;
10720 goto end;
10721 case VOID_TYPE:
10722 /* This signifies an endlink, if no arguments remain, return
10723 true, otherwise return false. */
10724 res = !more_const_call_expr_args_p (&iter);
10725 goto end;
10726 default:
10727 /* If no parameters remain or the parameter's code does not
10728 match the specified code, return false. Otherwise continue
10729 checking any remaining arguments. */
10730 arg = next_const_call_expr_arg (&iter);
10731 if (!validate_arg (arg, code))
10732 goto end;
10733 break;
10734 }
10735 }
10736 while (1);
10737
10738 /* We need gotos here since we can only have one VA_CLOSE in a
10739 function. */
10740 end: ;
10741 va_end (ap);
10742
10743 return res;
10744 }
10745
/* Default target-specific builtin expander that does nothing.  Returning
   NULL_RTX tells the caller no expansion was performed.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
10757
10758 /* Returns true is EXP represents data that would potentially reside
10759 in a readonly section. */
10760
10761 static bool
10762 readonly_data_expr (tree exp)
10763 {
10764 STRIP_NOPS (exp);
10765
10766 if (TREE_CODE (exp) != ADDR_EXPR)
10767 return false;
10768
10769 exp = get_base_address (TREE_OPERAND (exp, 0));
10770 if (!exp)
10771 return false;
10772
10773 /* Make sure we call decl_readonly_section only for trees it
10774 can handle (since it returns true for everything it doesn't
10775 understand). */
10776 if (TREE_CODE (exp) == STRING_CST
10777 || TREE_CODE (exp) == CONSTRUCTOR
10778 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10779 return decl_readonly_section (exp, 0);
10780 else
10781 return false;
10782 }
10783
10784 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10785 to the call, and TYPE is its return type.
10786
10787 Return NULL_TREE if no simplification was possible, otherwise return the
10788 simplified form of the call as a tree.
10789
10790 The simplified form may be a constant or other expression which
10791 computes the same value, but in a more efficient manner (including
10792 calls to other builtin functions).
10793
10794 The call may contain arguments which need to be evaluated, but
10795 which are not useful to determine the result of the call. In
10796 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10797 COMPOUND_EXPR will be an argument which must be evaluated.
10798 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10799 COMPOUND_EXPR in the chain will contain the tree for the simplified
10800 form of the builtin function call. */
10801
10802 static tree
10803 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10804 {
10805 if (!validate_arg (s1, POINTER_TYPE)
10806 || !validate_arg (s2, POINTER_TYPE))
10807 return NULL_TREE;
10808 else
10809 {
10810 tree fn;
10811 const char *p1, *p2;
10812
10813 p2 = c_getstr (s2);
10814 if (p2 == NULL)
10815 return NULL_TREE;
10816
10817 p1 = c_getstr (s1);
10818 if (p1 != NULL)
10819 {
10820 const char *r = strstr (p1, p2);
10821 tree tem;
10822
10823 if (r == NULL)
10824 return build_int_cst (TREE_TYPE (s1), 0);
10825
10826 /* Return an offset into the constant string argument. */
10827 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10828 return fold_convert_loc (loc, type, tem);
10829 }
10830
10831 /* The argument is const char *, and the result is char *, so we need
10832 a type conversion here to avoid a warning. */
10833 if (p2[0] == '\0')
10834 return fold_convert_loc (loc, type, s1);
10835
10836 if (p2[1] != '\0')
10837 return NULL_TREE;
10838
10839 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10840 if (!fn)
10841 return NULL_TREE;
10842
10843 /* New argument list transforming strstr(s1, s2) to
10844 strchr(s1, s2[0]). */
10845 return build_call_expr_loc (loc, fn, 2, s1,
10846 build_int_cst (integer_type_node, p2[0]));
10847 }
10848 }
10849
10850 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10851 the call, and TYPE is its return type.
10852
10853 Return NULL_TREE if no simplification was possible, otherwise return the
10854 simplified form of the call as a tree.
10855
10856 The simplified form may be a constant or other expression which
10857 computes the same value, but in a more efficient manner (including
10858 calls to other builtin functions).
10859
10860 The call may contain arguments which need to be evaluated, but
10861 which are not useful to determine the result of the call. In
10862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10863 COMPOUND_EXPR will be an argument which must be evaluated.
10864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10865 COMPOUND_EXPR in the chain will contain the tree for the simplified
10866 form of the builtin function call. */
10867
10868 static tree
10869 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10870 {
10871 if (!validate_arg (s1, POINTER_TYPE)
10872 || !validate_arg (s2, INTEGER_TYPE))
10873 return NULL_TREE;
10874 else
10875 {
10876 const char *p1;
10877
10878 if (TREE_CODE (s2) != INTEGER_CST)
10879 return NULL_TREE;
10880
10881 p1 = c_getstr (s1);
10882 if (p1 != NULL)
10883 {
10884 char c;
10885 const char *r;
10886 tree tem;
10887
10888 if (target_char_cast (s2, &c))
10889 return NULL_TREE;
10890
10891 r = strchr (p1, c);
10892
10893 if (r == NULL)
10894 return build_int_cst (TREE_TYPE (s1), 0);
10895
10896 /* Return an offset into the constant string argument. */
10897 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10898 return fold_convert_loc (loc, type, tem);
10899 }
10900 return NULL_TREE;
10901 }
10902 }
10903
10904 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10905 the call, and TYPE is its return type.
10906
10907 Return NULL_TREE if no simplification was possible, otherwise return the
10908 simplified form of the call as a tree.
10909
10910 The simplified form may be a constant or other expression which
10911 computes the same value, but in a more efficient manner (including
10912 calls to other builtin functions).
10913
10914 The call may contain arguments which need to be evaluated, but
10915 which are not useful to determine the result of the call. In
10916 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10917 COMPOUND_EXPR will be an argument which must be evaluated.
10918 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10919 COMPOUND_EXPR in the chain will contain the tree for the simplified
10920 form of the builtin function call. */
10921
10922 static tree
10923 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10924 {
10925 if (!validate_arg (s1, POINTER_TYPE)
10926 || !validate_arg (s2, INTEGER_TYPE))
10927 return NULL_TREE;
10928 else
10929 {
10930 tree fn;
10931 const char *p1;
10932
10933 if (TREE_CODE (s2) != INTEGER_CST)
10934 return NULL_TREE;
10935
10936 p1 = c_getstr (s1);
10937 if (p1 != NULL)
10938 {
10939 char c;
10940 const char *r;
10941 tree tem;
10942
10943 if (target_char_cast (s2, &c))
10944 return NULL_TREE;
10945
10946 r = strrchr (p1, c);
10947
10948 if (r == NULL)
10949 return build_int_cst (TREE_TYPE (s1), 0);
10950
10951 /* Return an offset into the constant string argument. */
10952 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10953 return fold_convert_loc (loc, type, tem);
10954 }
10955
10956 if (! integer_zerop (s2))
10957 return NULL_TREE;
10958
10959 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10960 if (!fn)
10961 return NULL_TREE;
10962
10963 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10964 return build_call_expr_loc (loc, fn, 2, s1, s2);
10965 }
10966 }
10967
10968 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10969 to the call, and TYPE is its return type.
10970
10971 Return NULL_TREE if no simplification was possible, otherwise return the
10972 simplified form of the call as a tree.
10973
10974 The simplified form may be a constant or other expression which
10975 computes the same value, but in a more efficient manner (including
10976 calls to other builtin functions).
10977
10978 The call may contain arguments which need to be evaluated, but
10979 which are not useful to determine the result of the call. In
10980 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10981 COMPOUND_EXPR will be an argument which must be evaluated.
10982 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10983 COMPOUND_EXPR in the chain will contain the tree for the simplified
10984 form of the builtin function call. */
10985
10986 static tree
10987 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10988 {
10989 if (!validate_arg (s1, POINTER_TYPE)
10990 || !validate_arg (s2, POINTER_TYPE))
10991 return NULL_TREE;
10992 else
10993 {
10994 tree fn;
10995 const char *p1, *p2;
10996
10997 p2 = c_getstr (s2);
10998 if (p2 == NULL)
10999 return NULL_TREE;
11000
11001 p1 = c_getstr (s1);
11002 if (p1 != NULL)
11003 {
11004 const char *r = strpbrk (p1, p2);
11005 tree tem;
11006
11007 if (r == NULL)
11008 return build_int_cst (TREE_TYPE (s1), 0);
11009
11010 /* Return an offset into the constant string argument. */
11011 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11012 return fold_convert_loc (loc, type, tem);
11013 }
11014
11015 if (p2[0] == '\0')
11016 /* strpbrk(x, "") == NULL.
11017 Evaluate and ignore s1 in case it had side-effects. */
11018 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11019
11020 if (p2[1] != '\0')
11021 return NULL_TREE; /* Really call strpbrk. */
11022
11023 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11024 if (!fn)
11025 return NULL_TREE;
11026
11027 /* New argument list transforming strpbrk(s1, s2) to
11028 strchr(s1, s2[0]). */
11029 return build_call_expr_loc (loc, fn, 2, s1,
11030 build_int_cst (integer_type_node, p2[0]));
11031 }
11032 }
11033
11034 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11035 to the call.
11036
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11039
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11043
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11051
11052 static tree
11053 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11054 {
11055 if (!validate_arg (dst, POINTER_TYPE)
11056 || !validate_arg (src, POINTER_TYPE))
11057 return NULL_TREE;
11058 else
11059 {
11060 const char *p = c_getstr (src);
11061
11062 /* If the string length is zero, return the dst parameter. */
11063 if (p && *p == '\0')
11064 return dst;
11065
11066 if (optimize_insn_for_speed_p ())
11067 {
11068 /* See if we can store by pieces into (dst + strlen(dst)). */
11069 tree newdst, call;
11070 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11071 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11072
11073 if (!strlen_fn || !strcpy_fn)
11074 return NULL_TREE;
11075
11076 /* If we don't have a movstr we don't want to emit an strcpy
11077 call. We have to do that if the length of the source string
11078 isn't computable (in that case we can use memcpy probably
11079 later expanding to a sequence of mov instructions). If we
11080 have movstr instructions we can emit strcpy calls. */
11081 if (!HAVE_movstr)
11082 {
11083 tree len = c_strlen (src, 1);
11084 if (! len || TREE_SIDE_EFFECTS (len))
11085 return NULL_TREE;
11086 }
11087
11088 /* Stabilize the argument list. */
11089 dst = builtin_save_expr (dst);
11090
11091 /* Create strlen (dst). */
11092 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11093 /* Create (dst p+ strlen (dst)). */
11094
11095 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11096 newdst = builtin_save_expr (newdst);
11097
11098 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11099 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11100 }
11101 return NULL_TREE;
11102 }
11103 }
11104
11105 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11106 arguments to the call.
11107
11108 Return NULL_TREE if no simplification was possible, otherwise return the
11109 simplified form of the call as a tree.
11110
11111 The simplified form may be a constant or other expression which
11112 computes the same value, but in a more efficient manner (including
11113 calls to other builtin functions).
11114
11115 The call may contain arguments which need to be evaluated, but
11116 which are not useful to determine the result of the call. In
11117 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11118 COMPOUND_EXPR will be an argument which must be evaluated.
11119 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11120 COMPOUND_EXPR in the chain will contain the tree for the simplified
11121 form of the builtin function call. */
11122
11123 static tree
11124 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11125 {
11126 if (!validate_arg (dst, POINTER_TYPE)
11127 || !validate_arg (src, POINTER_TYPE)
11128 || !validate_arg (len, INTEGER_TYPE))
11129 return NULL_TREE;
11130 else
11131 {
11132 const char *p = c_getstr (src);
11133
11134 /* If the requested length is zero, or the src parameter string
11135 length is zero, return the dst parameter. */
11136 if (integer_zerop (len) || (p && *p == '\0'))
11137 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11138
11139 /* If the requested len is greater than or equal to the string
11140 length, call strcat. */
11141 if (TREE_CODE (len) == INTEGER_CST && p
11142 && compare_tree_int (len, strlen (p)) >= 0)
11143 {
11144 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11145
11146 /* If the replacement _DECL isn't initialized, don't do the
11147 transformation. */
11148 if (!fn)
11149 return NULL_TREE;
11150
11151 return build_call_expr_loc (loc, fn, 2, dst, src);
11152 }
11153 return NULL_TREE;
11154 }
11155 }
11156
11157 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11158 to the call.
11159
11160 Return NULL_TREE if no simplification was possible, otherwise return the
11161 simplified form of the call as a tree.
11162
11163 The simplified form may be a constant or other expression which
11164 computes the same value, but in a more efficient manner (including
11165 calls to other builtin functions).
11166
11167 The call may contain arguments which need to be evaluated, but
11168 which are not useful to determine the result of the call. In
11169 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11170 COMPOUND_EXPR will be an argument which must be evaluated.
11171 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11172 COMPOUND_EXPR in the chain will contain the tree for the simplified
11173 form of the builtin function call. */
11174
11175 static tree
11176 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11177 {
11178 if (!validate_arg (s1, POINTER_TYPE)
11179 || !validate_arg (s2, POINTER_TYPE))
11180 return NULL_TREE;
11181 else
11182 {
11183 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11184
11185 /* If both arguments are constants, evaluate at compile-time. */
11186 if (p1 && p2)
11187 {
11188 const size_t r = strspn (p1, p2);
11189 return size_int (r);
11190 }
11191
11192 /* If either argument is "", return NULL_TREE. */
11193 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11194 /* Evaluate and ignore both arguments in case either one has
11195 side-effects. */
11196 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11197 s1, s2);
11198 return NULL_TREE;
11199 }
11200 }
11201
11202 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11203 to the call.
11204
11205 Return NULL_TREE if no simplification was possible, otherwise return the
11206 simplified form of the call as a tree.
11207
11208 The simplified form may be a constant or other expression which
11209 computes the same value, but in a more efficient manner (including
11210 calls to other builtin functions).
11211
11212 The call may contain arguments which need to be evaluated, but
11213 which are not useful to determine the result of the call. In
11214 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11215 COMPOUND_EXPR will be an argument which must be evaluated.
11216 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11217 COMPOUND_EXPR in the chain will contain the tree for the simplified
11218 form of the builtin function call. */
11219
11220 static tree
11221 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11222 {
11223 if (!validate_arg (s1, POINTER_TYPE)
11224 || !validate_arg (s2, POINTER_TYPE))
11225 return NULL_TREE;
11226 else
11227 {
11228 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11229
11230 /* If both arguments are constants, evaluate at compile-time. */
11231 if (p1 && p2)
11232 {
11233 const size_t r = strcspn (p1, p2);
11234 return size_int (r);
11235 }
11236
11237 /* If the first argument is "", return NULL_TREE. */
11238 if (p1 && *p1 == '\0')
11239 {
11240 /* Evaluate and ignore argument s2 in case it has
11241 side-effects. */
11242 return omit_one_operand_loc (loc, size_type_node,
11243 size_zero_node, s2);
11244 }
11245
11246 /* If the second argument is "", return __builtin_strlen(s1). */
11247 if (p2 && *p2 == '\0')
11248 {
11249 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11250
11251 /* If the replacement _DECL isn't initialized, don't do the
11252 transformation. */
11253 if (!fn)
11254 return NULL_TREE;
11255
11256 return build_call_expr_loc (loc, fn, 1, s1);
11257 }
11258 return NULL_TREE;
11259 }
11260 }
11261
11262 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11263 to the call. IGNORE is true if the value returned
11264 by the builtin will be ignored. UNLOCKED is true is true if this
11265 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11266 the known length of the string. Return NULL_TREE if no simplification
11267 was possible. */
11268
11269 tree
11270 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11271 bool ignore, bool unlocked, tree len)
11272 {
11273 /* If we're using an unlocked function, assume the other unlocked
11274 functions exist explicitly. */
11275 tree const fn_fputc = (unlocked
11276 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11277 : builtin_decl_implicit (BUILT_IN_FPUTC));
11278 tree const fn_fwrite = (unlocked
11279 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11280 : builtin_decl_implicit (BUILT_IN_FWRITE));
11281
11282 /* If the return value is used, don't do the transformation. */
11283 if (!ignore)
11284 return NULL_TREE;
11285
11286 /* Verify the arguments in the original call. */
11287 if (!validate_arg (arg0, POINTER_TYPE)
11288 || !validate_arg (arg1, POINTER_TYPE))
11289 return NULL_TREE;
11290
11291 if (! len)
11292 len = c_strlen (arg0, 0);
11293
11294 /* Get the length of the string passed to fputs. If the length
11295 can't be determined, punt. */
11296 if (!len
11297 || TREE_CODE (len) != INTEGER_CST)
11298 return NULL_TREE;
11299
11300 switch (compare_tree_int (len, 1))
11301 {
11302 case -1: /* length is 0, delete the call entirely . */
11303 return omit_one_operand_loc (loc, integer_type_node,
11304 integer_zero_node, arg1);;
11305
11306 case 0: /* length is 1, call fputc. */
11307 {
11308 const char *p = c_getstr (arg0);
11309
11310 if (p != NULL)
11311 {
11312 if (fn_fputc)
11313 return build_call_expr_loc (loc, fn_fputc, 2,
11314 build_int_cst
11315 (integer_type_node, p[0]), arg1);
11316 else
11317 return NULL_TREE;
11318 }
11319 }
11320 /* FALLTHROUGH */
11321 case 1: /* length is greater than 1, call fwrite. */
11322 {
11323 /* If optimizing for size keep fputs. */
11324 if (optimize_function_for_size_p (cfun))
11325 return NULL_TREE;
11326 /* New argument list transforming fputs(string, stream) to
11327 fwrite(string, 1, len, stream). */
11328 if (fn_fwrite)
11329 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11330 size_one_node, len, arg1);
11331 else
11332 return NULL_TREE;
11333 }
11334 default:
11335 gcc_unreachable ();
11336 }
11337 return NULL_TREE;
11338 }
11339
11340 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11341 produced. False otherwise. This is done so that we don't output the error
11342 or warning twice or three times. */
11343
11344 bool
11345 fold_builtin_next_arg (tree exp, bool va_start_p)
11346 {
11347 tree fntype = TREE_TYPE (current_function_decl);
11348 int nargs = call_expr_nargs (exp);
11349 tree arg;
11350
11351 if (!stdarg_p (fntype))
11352 {
11353 error ("%<va_start%> used in function with fixed args");
11354 return true;
11355 }
11356
11357 if (va_start_p)
11358 {
11359 if (va_start_p && (nargs != 2))
11360 {
11361 error ("wrong number of arguments to function %<va_start%>");
11362 return true;
11363 }
11364 arg = CALL_EXPR_ARG (exp, 1);
11365 }
11366 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11367 when we checked the arguments and if needed issued a warning. */
11368 else
11369 {
11370 if (nargs == 0)
11371 {
11372 /* Evidently an out of date version of <stdarg.h>; can't validate
11373 va_start's second argument, but can still work as intended. */
11374 warning (0, "%<__builtin_next_arg%> called without an argument");
11375 return true;
11376 }
11377 else if (nargs > 1)
11378 {
11379 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11380 return true;
11381 }
11382 arg = CALL_EXPR_ARG (exp, 0);
11383 }
11384
11385 if (TREE_CODE (arg) == SSA_NAME)
11386 arg = SSA_NAME_VAR (arg);
11387
11388 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11389 or __builtin_next_arg (0) the first time we see it, after checking
11390 the arguments and if needed issuing a warning. */
11391 if (!integer_zerop (arg))
11392 {
11393 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11394
11395 /* Strip off all nops for the sake of the comparison. This
11396 is not quite the same as STRIP_NOPS. It does more.
11397 We must also strip off INDIRECT_EXPR for C++ reference
11398 parameters. */
11399 while (CONVERT_EXPR_P (arg)
11400 || TREE_CODE (arg) == INDIRECT_REF)
11401 arg = TREE_OPERAND (arg, 0);
11402 if (arg != last_parm)
11403 {
11404 /* FIXME: Sometimes with the tree optimizers we can get the
11405 not the last argument even though the user used the last
11406 argument. We just warn and set the arg to be the last
11407 argument so that we will get wrong-code because of
11408 it. */
11409 warning (0, "second parameter of %<va_start%> not last named argument");
11410 }
11411
11412 /* Undefined by C99 7.15.1.4p4 (va_start):
11413 "If the parameter parmN is declared with the register storage
11414 class, with a function or array type, or with a type that is
11415 not compatible with the type that results after application of
11416 the default argument promotions, the behavior is undefined."
11417 */
11418 else if (DECL_REGISTER (arg))
11419 warning (0, "undefined behaviour when second parameter of "
11420 "%<va_start%> is declared with %<register%> storage");
11421
11422 /* We want to verify the second parameter just once before the tree
11423 optimizers are run and then avoid keeping it in the tree,
11424 as otherwise we could warn even for correct code like:
11425 void foo (int i, ...)
11426 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11427 if (va_start_p)
11428 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11429 else
11430 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11431 }
11432 return false;
11433 }
11434
11435
11436 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11437 ORIG may be null if this is a 2-argument call. We don't attempt to
11438 simplify calls with more than 3 arguments.
11439
11440 Return NULL_TREE if no simplification was possible, otherwise return the
11441 simplified form of the call as a tree. If IGNORED is true, it means that
11442 the caller does not use the returned value of the function. */
11443
11444 static tree
11445 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11446 tree orig, int ignored)
11447 {
11448 tree call, retval;
11449 const char *fmt_str = NULL;
11450
11451 /* Verify the required arguments in the original call. We deal with two
11452 types of sprintf() calls: 'sprintf (str, fmt)' and
11453 'sprintf (dest, "%s", orig)'. */
11454 if (!validate_arg (dest, POINTER_TYPE)
11455 || !validate_arg (fmt, POINTER_TYPE))
11456 return NULL_TREE;
11457 if (orig && !validate_arg (orig, POINTER_TYPE))
11458 return NULL_TREE;
11459
11460 /* Check whether the format is a literal string constant. */
11461 fmt_str = c_getstr (fmt);
11462 if (fmt_str == NULL)
11463 return NULL_TREE;
11464
11465 call = NULL_TREE;
11466 retval = NULL_TREE;
11467
11468 if (!init_target_chars ())
11469 return NULL_TREE;
11470
11471 /* If the format doesn't contain % args or %%, use strcpy. */
11472 if (strchr (fmt_str, target_percent) == NULL)
11473 {
11474 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11475
11476 if (!fn)
11477 return NULL_TREE;
11478
11479 /* Don't optimize sprintf (buf, "abc", ptr++). */
11480 if (orig)
11481 return NULL_TREE;
11482
11483 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11484 'format' is known to contain no % formats. */
11485 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11486 if (!ignored)
11487 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11488 }
11489
11490 /* If the format is "%s", use strcpy if the result isn't used. */
11491 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11492 {
11493 tree fn;
11494 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11495
11496 if (!fn)
11497 return NULL_TREE;
11498
11499 /* Don't crash on sprintf (str1, "%s"). */
11500 if (!orig)
11501 return NULL_TREE;
11502
11503 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11504 if (!ignored)
11505 {
11506 retval = c_strlen (orig, 1);
11507 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11508 return NULL_TREE;
11509 }
11510 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11511 }
11512
11513 if (call && retval)
11514 {
11515 retval = fold_convert_loc
11516 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
11517 retval);
11518 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11519 }
11520 else
11521 return call;
11522 }
11523
11524 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11525 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11526 attempt to simplify calls with more than 4 arguments.
11527
11528 Return NULL_TREE if no simplification was possible, otherwise return the
11529 simplified form of the call as a tree. If IGNORED is true, it means that
11530 the caller does not use the returned value of the function. */
11531
11532 static tree
11533 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11534 tree orig, int ignored)
11535 {
11536 tree call, retval;
11537 const char *fmt_str = NULL;
11538 unsigned HOST_WIDE_INT destlen;
11539
11540 /* Verify the required arguments in the original call. We deal with two
11541 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11542 'snprintf (dest, cst, "%s", orig)'. */
11543 if (!validate_arg (dest, POINTER_TYPE)
11544 || !validate_arg (destsize, INTEGER_TYPE)
11545 || !validate_arg (fmt, POINTER_TYPE))
11546 return NULL_TREE;
11547 if (orig && !validate_arg (orig, POINTER_TYPE))
11548 return NULL_TREE;
11549
11550 if (!host_integerp (destsize, 1))
11551 return NULL_TREE;
11552
11553 /* Check whether the format is a literal string constant. */
11554 fmt_str = c_getstr (fmt);
11555 if (fmt_str == NULL)
11556 return NULL_TREE;
11557
11558 call = NULL_TREE;
11559 retval = NULL_TREE;
11560
11561 if (!init_target_chars ())
11562 return NULL_TREE;
11563
11564 destlen = tree_low_cst (destsize, 1);
11565
11566 /* If the format doesn't contain % args or %%, use strcpy. */
11567 if (strchr (fmt_str, target_percent) == NULL)
11568 {
11569 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11570 size_t len = strlen (fmt_str);
11571
11572 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11573 if (orig)
11574 return NULL_TREE;
11575
11576 /* We could expand this as
11577 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11578 or to
11579 memcpy (str, fmt_with_nul_at_cstm1, cst);
11580 but in the former case that might increase code size
11581 and in the latter case grow .rodata section too much.
11582 So punt for now. */
11583 if (len >= destlen)
11584 return NULL_TREE;
11585
11586 if (!fn)
11587 return NULL_TREE;
11588
11589 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11590 'format' is known to contain no % formats and
11591 strlen (fmt) < cst. */
11592 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11593
11594 if (!ignored)
11595 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11596 }
11597
11598 /* If the format is "%s", use strcpy if the result isn't used. */
11599 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11600 {
11601 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11602 unsigned HOST_WIDE_INT origlen;
11603
11604 /* Don't crash on snprintf (str1, cst, "%s"). */
11605 if (!orig)
11606 return NULL_TREE;
11607
11608 retval = c_strlen (orig, 1);
11609 if (!retval || !host_integerp (retval, 1))
11610 return NULL_TREE;
11611
11612 origlen = tree_low_cst (retval, 1);
11613 /* We could expand this as
11614 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11615 or to
11616 memcpy (str1, str2_with_nul_at_cstm1, cst);
11617 but in the former case that might increase code size
11618 and in the latter case grow .rodata section too much.
11619 So punt for now. */
11620 if (origlen >= destlen)
11621 return NULL_TREE;
11622
11623 /* Convert snprintf (str1, cst, "%s", str2) into
11624 strcpy (str1, str2) if strlen (str2) < cst. */
11625 if (!fn)
11626 return NULL_TREE;
11627
11628 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11629
11630 if (ignored)
11631 retval = NULL_TREE;
11632 }
11633
11634 if (call && retval)
11635 {
11636 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
11637 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11638 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11639 }
11640 else
11641 return call;
11642 }
11643
11644 /* Expand a call EXP to __builtin_object_size. */
11645
11646 rtx
11647 expand_builtin_object_size (tree exp)
11648 {
11649 tree ost;
11650 int object_size_type;
11651 tree fndecl = get_callee_fndecl (exp);
11652
11653 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11654 {
11655 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11656 exp, fndecl);
11657 expand_builtin_trap ();
11658 return const0_rtx;
11659 }
11660
11661 ost = CALL_EXPR_ARG (exp, 1);
11662 STRIP_NOPS (ost);
11663
11664 if (TREE_CODE (ost) != INTEGER_CST
11665 || tree_int_cst_sgn (ost) < 0
11666 || compare_tree_int (ost, 3) > 0)
11667 {
11668 error ("%Klast argument of %D is not integer constant between 0 and 3",
11669 exp, fndecl);
11670 expand_builtin_trap ();
11671 return const0_rtx;
11672 }
11673
11674 object_size_type = tree_low_cst (ost, 0);
11675
11676 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11677 }
11678
11679 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11680 FCODE is the BUILT_IN_* to use.
11681 Return NULL_RTX if we failed; the caller should emit a normal call,
11682 otherwise try to get the result in TARGET, if convenient (and in
11683 mode MODE if that's convenient). */
11684
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* memset's second argument is the fill byte; the other three
     variants take a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Punt unless the object size is a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  /* A constant LEN (or an unlimited SIZE of all-ones) lets us either
     diagnose a guaranteed overflow or drop down to the unchecked
     function.  */
  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the unchecked call, preserving the tail-call flag of
	 the original checked call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11798
11799 /* Emit warning if a buffer overflow is detected at compile time. */
11800
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* Nonzero when LEN is the source string itself and its length must be
     computed with c_strlen before comparing against SIZE.  */
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length and object-size arguments; their positions
     differ between the checked builtins.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      /* Callers only pass the fcodes handled above.  */
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE must be a known constant; all-ones (presumably (size_t) -1,
     the "unknown object size" sentinel) disables checking.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string: warn only when its constant length
         is >= SIZE (strlen + the terminating NUL overflows).  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* Require a constant LEN that is >= SIZE before looking at SRC.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* SRC length unknown: the overflow is possible, not certain.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
11866
11867 /* Emit warning if a buffer overflow is detected at compile time
11868 in __sprintf_chk/__vsprintf_chk calls. */
11869
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE must be a known constant; all-ones disables checking.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Output length LEN requires LEN + 1 bytes including the NUL, so
     LEN >= SIZE is guaranteed to overflow.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
11923
11924 /* Emit warning if a free is called with address of a variable. */
11925
11926 static void
11927 maybe_emit_free_warning (tree exp)
11928 {
11929 tree arg = CALL_EXPR_ARG (exp, 0);
11930
11931 STRIP_NOPS (arg);
11932 if (TREE_CODE (arg) != ADDR_EXPR)
11933 return;
11934
11935 arg = get_base_address (TREE_OPERAND (arg, 0));
11936 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11937 return;
11938
11939 if (SSA_VAR_P (arg))
11940 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11941 "%Kattempt to free a non-heap object %qD", exp, arg);
11942 else
11943 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11944 "%Kattempt to free a non-heap object", exp);
11945 }
11946
11947 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11948 if possible. */
11949
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* A direct address: fold whenever the computed size fits
	 size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  (The -1/0 value is the "unknown" result for the given
	 object-size type, so don't fold it in prematurely.)  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
11996
11997 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11998 DEST, SRC, LEN, and SIZE are the arguments to the call.
11999 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12000 code of the builtin. If MAXLEN is not NULL, it is maximum length
12001 passed as third argument. */
12002
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill value (an
     integer), not a pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	/* Keep LEN around for its side-effects, yield DEST.  */
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* SIZE must be a compile-time constant to reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown, so checking is a
     no-op and the call can always be converted.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Known overflow (SIZE < MAXLEN): keep the checked call so the
	 runtime failure path is taken.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12091
12092 /* Fold a call to the __st[rp]cpy_chk builtin.
12093 DEST, SRC, and SIZE are the arguments to the call.
12094 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12095 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12096 strings passed as second argument. */
12097
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Need a constant SIZE to reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means checking is disabled; skip straight to the
     conversion into the unchecked function.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* SIZE must be strictly greater than the string length for the
	 copy (including the NUL) to fit.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12174
12175 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12176 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12177 length passed as third argument. */
12178
tree
fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
			  tree len, tree size, tree maxlen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* Need a constant SIZE to reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means checking is disabled; convert directly.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Known overflow: keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  /* If __builtin_strncpy_chk is used, assume strncpy is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12218
12219 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12220 are the arguments to the call. */
12221
12222 static tree
12223 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12224 tree src, tree size)
12225 {
12226 tree fn;
12227 const char *p;
12228
12229 if (!validate_arg (dest, POINTER_TYPE)
12230 || !validate_arg (src, POINTER_TYPE)
12231 || !validate_arg (size, INTEGER_TYPE))
12232 return NULL_TREE;
12233
12234 p = c_getstr (src);
12235 /* If the SRC parameter is "", return DEST. */
12236 if (p && *p == '\0')
12237 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12238
12239 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12240 return NULL_TREE;
12241
12242 /* If __builtin_strcat_chk is used, assume strcat is available. */
12243 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12244 if (!fn)
12245 return NULL_TREE;
12246
12247 return build_call_expr_loc (loc, fn, 2, dest, src);
12248 }
12249
12250 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12251 LEN, and SIZE. */
12252
12253 static tree
12254 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12255 tree dest, tree src, tree len, tree size)
12256 {
12257 tree fn;
12258 const char *p;
12259
12260 if (!validate_arg (dest, POINTER_TYPE)
12261 || !validate_arg (src, POINTER_TYPE)
12262 || !validate_arg (size, INTEGER_TYPE)
12263 || !validate_arg (size, INTEGER_TYPE))
12264 return NULL_TREE;
12265
12266 p = c_getstr (src);
12267 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12268 if (p && *p == '\0')
12269 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12270 else if (integer_zerop (len))
12271 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12272
12273 if (! host_integerp (size, 1))
12274 return NULL_TREE;
12275
12276 if (! integer_all_onesp (size))
12277 {
12278 tree src_len = c_strlen (src, 1);
12279 if (src_len
12280 && host_integerp (src_len, 1)
12281 && host_integerp (len, 1)
12282 && ! tree_int_cst_lt (len, src_len))
12283 {
12284 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12285 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12286 if (!fn)
12287 return NULL_TREE;
12288
12289 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12290 }
12291 return NULL_TREE;
12292 }
12293
12294 /* If __builtin_strncat_chk is used, assume strncat is available. */
12295 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12296 if (!fn)
12297 return NULL_TREE;
12298
12299 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12300 }
12301
12302 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12303 Return NULL_TREE if a normal call should be emitted rather than
12304 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12305 or BUILT_IN_VSPRINTF_CHK. */
12306
static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     dest, flag, size, fmt[, ...].  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Need a constant SIZE to reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN stays NULL_TREE unless the output length can be computed.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless checking is disabled, require a known output length that
     fits the destination (LEN < SIZE).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keep dest, fmt and trailing args.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
12393
12394 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12395 a normal call should be emitted rather than expanding the function
12396 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12397
static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
			  enum built_in_function fcode)
{
  /* Thin wrapper: unpack the CALL_EXPR's argument array and delegate.  */
  return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), fcode);
}
12405
12406 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12407 NULL_TREE if a normal call should be emitted rather than expanding
12408 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12409 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12410 passed as second argument. */
12411
static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     dest, len, flag, size, fmt[, ...].  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Need a constant SIZE to reason about overflow.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means checking is disabled.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Known overflow: keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keep dest, len, fmt and the rest.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
12482
12483 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12484 a normal call should be emitted rather than expanding the function
12485 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12486 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12487 passed as second argument. */
12488
tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  /* Thin wrapper: unpack the CALL_EXPR's argument array and delegate.  */
  return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
				      CALL_EXPR_ARGP (exp), maxlen, fcode);
}
12496
12497 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12498 FMT and ARG are the arguments to the call; we don't fold cases with
12499 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12500
12501 Return NULL_TREE if no simplification was possible, otherwise return the
12502 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12503 code of the function to be simplified. */
12504
static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: format is exactly "%s", or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" needs a real (non-va_list) string argument.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL stays NULL when the needed replacement builtin is missing.  */
  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12645
12646 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12647 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12648 more than 3 arguments, and ARG may be null in the 2-argument case.
12649
12650 Return NULL_TREE if no simplification was possible, otherwise return the
12651 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12652 code of the function to be simplified. */
12653
static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with no conversion to consume it is
	 suspicious; leave the call alone (va_list variants excepted).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL stays NULL when the needed replacement builtin is missing.  */
  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12744
12745 /* Initialize format string characters in the target charset. */
12746
12747 static bool
12748 init_target_chars (void)
12749 {
12750 static bool init;
12751 if (!init)
12752 {
12753 target_newline = lang_hooks.to_target_charset ('\n');
12754 target_percent = lang_hooks.to_target_charset ('%');
12755 target_c = lang_hooks.to_target_charset ('c');
12756 target_s = lang_hooks.to_target_charset ('s');
12757 if (target_newline == 0 || target_percent == 0 || target_c == 0
12758 || target_s == 0)
12759 return false;
12760
12761 target_percent_c[0] = target_percent;
12762 target_percent_c[1] = target_c;
12763 target_percent_c[2] = '\0';
12764
12765 target_percent_s[0] = target_percent;
12766 target_percent_s[1] = target_s;
12767 target_percent_s[2] = '\0';
12768
12769 target_percent_s_newline[0] = target_percent;
12770 target_percent_s_newline[1] = target_s;
12771 target_percent_s_newline[2] = target_newline;
12772 target_percent_s_newline[3] = '\0';
12773
12774 init = true;
12775 }
12776 return true;
12777 }
12778
12779 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12780 and no overflow/underflow occurred. INEXACT is true if M was not
12781 exactly calculated. TYPE is the tree type for the result. This
12782 function assumes that you cleared the MPFR flags and then
12783 calculated M to see if anything subsequently set a flag prior to
12784 entering this function. Return NULL_TREE if any checks fail. */
12785
12786 static tree
12787 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12788 {
12789 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12790 overflow/underflow occurred. If -frounding-math, proceed iff the
12791 result of calling FUNC was exact. */
12792 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12793 && (!flag_rounding_math || !inexact))
12794 {
12795 REAL_VALUE_TYPE rr;
12796
12797 real_from_mpfr (&rr, m, type, GMP_RNDN);
12798 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12799 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12800 but the mpft_t is not, then we underflowed in the
12801 conversion. */
12802 if (real_isfinite (&rr)
12803 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12804 {
12805 REAL_VALUE_TYPE rmode;
12806
12807 real_convert (&rmode, TYPE_MODE (type), &rr);
12808 /* Proceed iff the specified mode can hold the value. */
12809 if (real_identical (&rmode, &rr))
12810 return build_real (type, rmode);
12811 }
12812 }
12813 return NULL_TREE;
12814 }
12815
12816 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12817 number and no overflow/underflow occurred. INEXACT is true if M
12818 was not exactly calculated. TYPE is the tree type for the result.
12819 This function assumes that you cleared the MPFR flags and then
12820 calculated M to see if anything subsequently set a flag prior to
12821 entering this function. Return NULL_TREE if any checks fail, if
12822 FORCE_CONVERT is true, then bypass the checks. */
12823
12824 static tree
12825 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12826 {
12827 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12828 overflow/underflow occurred. If -frounding-math, proceed iff the
12829 result of calling FUNC was exact. */
12830 if (force_convert
12831 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12832 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12833 && (!flag_rounding_math || !inexact)))
12834 {
12835 REAL_VALUE_TYPE re, im;
12836
12837 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12838 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12839 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12840 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12841 but the mpft_t is not, then we underflowed in the
12842 conversion. */
12843 if (force_convert
12844 || (real_isfinite (&re) && real_isfinite (&im)
12845 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12846 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12847 {
12848 REAL_VALUE_TYPE re_mode, im_mode;
12849
12850 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12851 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12852 /* Proceed iff the specified mode can hold the value. */
12853 if (force_convert
12854 || (real_identical (&re_mode, &re)
12855 && real_identical (&im_mode, &im)))
12856 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12857 build_real (TREE_TYPE (type), im_mode));
12858 }
12859 }
12860 return NULL_TREE;
12861 }
12862
12863 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12864 FUNC on it and return the resulting value as a tree with type TYPE.
12865 If MIN and/or MAX are not NULL, then the supplied ARG must be
12866 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12867 acceptable values, otherwise they are not. The mpfr precision is
12868 set to the precision of TYPE. We assume that function FUNC returns
12869 zero if the result could be calculated exactly within the requested
12870 precision. */
12871
12872 static tree
12873 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12874 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12875 bool inclusive)
12876 {
12877 tree result = NULL_TREE;
12878
12879 STRIP_NOPS (arg);
12880
12881 /* To proceed, MPFR must exactly represent the target floating point
12882 format, which only happens when the target base equals two. */
12883 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12884 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12885 {
12886 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12887
12888 if (real_isfinite (ra)
12889 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12890 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12891 {
12892 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12893 const int prec = fmt->p;
12894 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12895 int inexact;
12896 mpfr_t m;
12897
12898 mpfr_init2 (m, prec);
12899 mpfr_from_real (m, ra, GMP_RNDN);
12900 mpfr_clear_flags ();
12901 inexact = func (m, m, rnd);
12902 result = do_mpfr_ckconv (m, type, inexact);
12903 mpfr_clear (m);
12904 }
12905 }
12906
12907 return result;
12908 }
12909
12910 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12911 FUNC on it and return the resulting value as a tree with type TYPE.
12912 The mpfr precision is set to the precision of TYPE. We assume that
12913 function FUNC returns zero if the result could be calculated
12914 exactly within the requested precision. */
12915
12916 static tree
12917 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12918 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12919 {
12920 tree result = NULL_TREE;
12921
12922 STRIP_NOPS (arg1);
12923 STRIP_NOPS (arg2);
12924
12925 /* To proceed, MPFR must exactly represent the target floating point
12926 format, which only happens when the target base equals two. */
12927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12928 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12929 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12930 {
12931 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12932 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12933
12934 if (real_isfinite (ra1) && real_isfinite (ra2))
12935 {
12936 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12937 const int prec = fmt->p;
12938 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12939 int inexact;
12940 mpfr_t m1, m2;
12941
12942 mpfr_inits2 (prec, m1, m2, NULL);
12943 mpfr_from_real (m1, ra1, GMP_RNDN);
12944 mpfr_from_real (m2, ra2, GMP_RNDN);
12945 mpfr_clear_flags ();
12946 inexact = func (m1, m1, m2, rnd);
12947 result = do_mpfr_ckconv (m1, type, inexact);
12948 mpfr_clears (m1, m2, NULL);
12949 }
12950 }
12951
12952 return result;
12953 }
12954
12955 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12956 FUNC on it and return the resulting value as a tree with type TYPE.
12957 The mpfr precision is set to the precision of TYPE. We assume that
12958 function FUNC returns zero if the result could be calculated
12959 exactly within the requested precision. */
12960
12961 static tree
12962 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12963 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12964 {
12965 tree result = NULL_TREE;
12966
12967 STRIP_NOPS (arg1);
12968 STRIP_NOPS (arg2);
12969 STRIP_NOPS (arg3);
12970
12971 /* To proceed, MPFR must exactly represent the target floating point
12972 format, which only happens when the target base equals two. */
12973 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12974 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12975 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12976 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12977 {
12978 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12979 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12980 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12981
12982 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12983 {
12984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12985 const int prec = fmt->p;
12986 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12987 int inexact;
12988 mpfr_t m1, m2, m3;
12989
12990 mpfr_inits2 (prec, m1, m2, m3, NULL);
12991 mpfr_from_real (m1, ra1, GMP_RNDN);
12992 mpfr_from_real (m2, ra2, GMP_RNDN);
12993 mpfr_from_real (m3, ra3, GMP_RNDN);
12994 mpfr_clear_flags ();
12995 inexact = func (m1, m1, m2, m3, rnd);
12996 result = do_mpfr_ckconv (m1, type, inexact);
12997 mpfr_clears (m1, m2, m3, NULL);
12998 }
12999 }
13000
13001 return result;
13002 }
13003
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the MPFR exception flags so do_mpfr_ckconv can detect
	     anything mpfr_sin_cos raises.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Only fold if BOTH results converted cleanly.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  The real
		 part is the cosine and the imaginary part the sine.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values; mark both stores as having side
		     effects so they are not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13073
13074 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13075 two-argument mpfr order N Bessel function FUNC on them and return
13076 the resulting value as a tree with type TYPE. The mpfr precision
13077 is set to the precision of TYPE. We assume that function FUNC
13078 returns zero if the result could be calculated exactly within the
13079 requested precision. */
13080 static tree
13081 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13082 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13083 const REAL_VALUE_TYPE *min, bool inclusive)
13084 {
13085 tree result = NULL_TREE;
13086
13087 STRIP_NOPS (arg1);
13088 STRIP_NOPS (arg2);
13089
13090 /* To proceed, MPFR must exactly represent the target floating point
13091 format, which only happens when the target base equals two. */
13092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13093 && host_integerp (arg1, 0)
13094 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13095 {
13096 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13097 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13098
13099 if (n == (long)n
13100 && real_isfinite (ra)
13101 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13102 {
13103 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13104 const int prec = fmt->p;
13105 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13106 int inexact;
13107 mpfr_t m;
13108
13109 mpfr_init2 (m, prec);
13110 mpfr_from_real (m, ra, GMP_RNDN);
13111 mpfr_clear_flags ();
13112 inexact = func (m, n, m, rnd);
13113 result = do_mpfr_ckconv (m, type, inexact);
13114 mpfr_clear (m);
13115 }
13116 }
13117
13118 return result;
13119 }
13120
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect any overflow or
	     underflow raised by mpfr_remquo.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value; the store carries a side effect so
		     it is not optimized away.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13194
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect any overflow or
	     underflow raised by mpfr_lgamma.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg; mark the store
		 as a side effect so it is not optimized away.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13259
13260 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13261 function FUNC on it and return the resulting value as a tree with
13262 type TYPE. The mpfr precision is set to the precision of TYPE. We
13263 assume that function FUNC returns zero if the result could be
13264 calculated exactly within the requested precision. */
13265
13266 static tree
13267 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13268 {
13269 tree result = NULL_TREE;
13270
13271 STRIP_NOPS (arg);
13272
13273 /* To proceed, MPFR must exactly represent the target floating point
13274 format, which only happens when the target base equals two. */
13275 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13276 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13277 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13278 {
13279 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13280 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13281
13282 if (real_isfinite (re) && real_isfinite (im))
13283 {
13284 const struct real_format *const fmt =
13285 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13286 const int prec = fmt->p;
13287 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13288 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13289 int inexact;
13290 mpc_t m;
13291
13292 mpc_init2 (m, prec);
13293 mpfr_from_real (mpc_realref(m), re, rnd);
13294 mpfr_from_real (mpc_imagref(m), im, rnd);
13295 mpfr_clear_flags ();
13296 inexact = func (m, m, crnd);
13297 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13298 mpc_clear (m);
13299 }
13300 }
13301
13302 return result;
13303 }
13304
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     Both arguments must be complex constants over real components.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE the finiteness checks are skipped entirely.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  /* Clear flags so do_mpc_ckconv can detect what FUNC raised.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  /* DO_NONFINITE doubles as FORCE_CONVERT: when folding
	     non-finite values, do_mpc_ckconv bypasses its checks.  */
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13363
13364 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13365 a normal call should be emitted rather than expanding the function
13366 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13367
13368 static tree
13369 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13370 {
13371 int nargs = gimple_call_num_args (stmt);
13372
13373 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13374 (nargs > 0
13375 ? gimple_call_arg_ptr (stmt, 0)
13376 : &error_mark_node), fcode);
13377 }
13378
13379 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13380 a normal call should be emitted rather than expanding the function
13381 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13382 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13383 passed as second argument. */
13384
13385 tree
13386 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13387 enum built_in_function fcode)
13388 {
13389 int nargs = gimple_call_num_args (stmt);
13390
13391 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13392 (nargs > 0
13393 ? gimple_call_arg_ptr (stmt, 0)
13394 : &error_mark_node), maxlen, fcode);
13395 }
13396
13397 /* Builtins with folding operations that operate on "..." arguments
13398 need special handling; we need to store the arguments in a convenient
13399 data structure before attempting any folding. Fortunately there are
13400 only a few builtins that fall into this category. FNDECL is the
13401 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13402 result of the function call is ignored. */
13403
13404 static tree
13405 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13406 bool ignore ATTRIBUTE_UNUSED)
13407 {
13408 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13409 tree ret = NULL_TREE;
13410
13411 switch (fcode)
13412 {
13413 case BUILT_IN_SPRINTF_CHK:
13414 case BUILT_IN_VSPRINTF_CHK:
13415 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13416 break;
13417
13418 case BUILT_IN_SNPRINTF_CHK:
13419 case BUILT_IN_VSNPRINTF_CHK:
13420 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13421
13422 default:
13423 break;
13424 }
13425 if (ret)
13426 {
13427 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13428 TREE_NO_WARNING (ret) = 1;
13429 return ret;
13430 }
13431 return NULL_TREE;
13432 }
13433
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtin declarations, and never a call
     that uses __builtin_va_arg_pack in its arguments.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* With no arguments, point at error_mark_node so the folders
	 still receive a valid argument array.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    /* Fall back to the folder for "..."-style builtins.  */
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper added by
		     gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
13488
13489 /* Look up the function in builtin_decl that corresponds to DECL
13490 and set ASMSPEC as its user assembler name. DECL must be a
13491 function decl that declares a builtin. */
13492
13493 void
13494 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13495 {
13496 tree builtin;
13497 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13498 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13499 && asmspec != 0);
13500
13501 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
13502 set_user_assembler_name (
13503 builtin, asmspec);
13504 switch (DECL_FUNCTION_CODE (decl))
13505 {
13506 case BUILT_IN_MEMCPY:
13507 init_block_move_fn (asmspec);
13508 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13509 break;
13510 case BUILT_IN_MEMSET:
13511 init_block_clear_fn (asmspec);
13512 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13513 break;
13514 case BUILT_IN_MEMMOVE:
13515 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13516 break;
13517 case BUILT_IN_MEMCMP:
13518 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13519 break;
13520 case BUILT_IN_ABORT:
13521 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13522 break;
13523 case BUILT_IN_FFS:
13524 if (INT_TYPE_SIZE < BITS_PER_WORD)
13525 {
13526 set_user_assembler_libfunc ("ffs", asmspec);
13527 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13528 MODE_INT, 0), "ffs");
13529 }
13530 break;
13531 default:
13532 break;
13533 }
13534 }
13535
13536 /* Return true if DECL is a builtin that expands to a constant or similarly
13537 simple code. */
13538 bool
13539 is_simple_builtin (tree decl)
13540 {
13541 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13542 switch (DECL_FUNCTION_CODE (decl))
13543 {
13544 /* Builtins that expand to constants. */
13545 case BUILT_IN_CONSTANT_P:
13546 case BUILT_IN_EXPECT:
13547 case BUILT_IN_OBJECT_SIZE:
13548 case BUILT_IN_UNREACHABLE:
13549 /* Simple register moves or loads from stack. */
13550 case BUILT_IN_ASSUME_ALIGNED:
13551 case BUILT_IN_RETURN_ADDRESS:
13552 case BUILT_IN_EXTRACT_RETURN_ADDR:
13553 case BUILT_IN_FROB_RETURN_ADDR:
13554 case BUILT_IN_RETURN:
13555 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13556 case BUILT_IN_FRAME_ADDRESS:
13557 case BUILT_IN_VA_END:
13558 case BUILT_IN_STACK_SAVE:
13559 case BUILT_IN_STACK_RESTORE:
13560 /* Exception state returns or moves registers around. */
13561 case BUILT_IN_EH_FILTER:
13562 case BUILT_IN_EH_POINTER:
13563 case BUILT_IN_EH_COPY_VALUES:
13564 return true;
13565
13566 default:
13567 return false;
13568 }
13569
13570 return false;
13571 }
13572
13573 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13574 most probably expanded inline into reasonably simple code. This is a
13575 superset of is_simple_builtin. */
13576 bool
13577 is_inexpensive_builtin (tree decl)
13578 {
13579 if (!decl)
13580 return false;
13581 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13582 return true;
13583 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13584 switch (DECL_FUNCTION_CODE (decl))
13585 {
13586 case BUILT_IN_ABS:
13587 case BUILT_IN_ALLOCA:
13588 case BUILT_IN_ALLOCA_WITH_ALIGN:
13589 case BUILT_IN_BSWAP32:
13590 case BUILT_IN_BSWAP64:
13591 case BUILT_IN_CLZ:
13592 case BUILT_IN_CLZIMAX:
13593 case BUILT_IN_CLZL:
13594 case BUILT_IN_CLZLL:
13595 case BUILT_IN_CTZ:
13596 case BUILT_IN_CTZIMAX:
13597 case BUILT_IN_CTZL:
13598 case BUILT_IN_CTZLL:
13599 case BUILT_IN_FFS:
13600 case BUILT_IN_FFSIMAX:
13601 case BUILT_IN_FFSL:
13602 case BUILT_IN_FFSLL:
13603 case BUILT_IN_IMAXABS:
13604 case BUILT_IN_FINITE:
13605 case BUILT_IN_FINITEF:
13606 case BUILT_IN_FINITEL:
13607 case BUILT_IN_FINITED32:
13608 case BUILT_IN_FINITED64:
13609 case BUILT_IN_FINITED128:
13610 case BUILT_IN_FPCLASSIFY:
13611 case BUILT_IN_ISFINITE:
13612 case BUILT_IN_ISINF_SIGN:
13613 case BUILT_IN_ISINF:
13614 case BUILT_IN_ISINFF:
13615 case BUILT_IN_ISINFL:
13616 case BUILT_IN_ISINFD32:
13617 case BUILT_IN_ISINFD64:
13618 case BUILT_IN_ISINFD128:
13619 case BUILT_IN_ISNAN:
13620 case BUILT_IN_ISNANF:
13621 case BUILT_IN_ISNANL:
13622 case BUILT_IN_ISNAND32:
13623 case BUILT_IN_ISNAND64:
13624 case BUILT_IN_ISNAND128:
13625 case BUILT_IN_ISNORMAL:
13626 case BUILT_IN_ISGREATER:
13627 case BUILT_IN_ISGREATEREQUAL:
13628 case BUILT_IN_ISLESS:
13629 case BUILT_IN_ISLESSEQUAL:
13630 case BUILT_IN_ISLESSGREATER:
13631 case BUILT_IN_ISUNORDERED:
13632 case BUILT_IN_VA_ARG_PACK:
13633 case BUILT_IN_VA_ARG_PACK_LEN:
13634 case BUILT_IN_VA_COPY:
13635 case BUILT_IN_TRAP:
13636 case BUILT_IN_SAVEREGS:
13637 case BUILT_IN_POPCOUNTL:
13638 case BUILT_IN_POPCOUNTLL:
13639 case BUILT_IN_POPCOUNTIMAX:
13640 case BUILT_IN_POPCOUNT:
13641 case BUILT_IN_PARITYL:
13642 case BUILT_IN_PARITYLL:
13643 case BUILT_IN_PARITYIMAX:
13644 case BUILT_IN_PARITY:
13645 case BUILT_IN_LABS:
13646 case BUILT_IN_LLABS:
13647 case BUILT_IN_PREFETCH:
13648 return true;
13649
13650 default:
13651 return is_simple_builtin (decl);
13652 }
13653
13654 return false;
13655 }