1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
64 struct target_builtins default_target_builtins
;
66 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names
[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names
[(int) END_BUILTINS
] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls
[(int) END_BUILTINS
];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls
[(int) END_BUILTINS
];
88 static const char *c_getstr (tree
);
89 static rtx
c_readstr (const char *, enum machine_mode
);
90 static int target_char_cast (tree
, char *);
91 static rtx
get_memory_rtx (tree
, tree
);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx
result_vector (int, rtx
);
97 static void expand_builtin_update_setjmp_buf (rtx
);
98 static void expand_builtin_prefetch (tree
);
99 static rtx
expand_builtin_apply_args (void);
100 static rtx
expand_builtin_apply_args_1 (void);
101 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
102 static void expand_builtin_return (rtx
);
103 static enum type_class
type_to_class (tree
);
104 static rtx
expand_builtin_classify_type (tree
);
105 static void expand_errno_check (tree
, rtx
);
106 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
107 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
108 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
109 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
110 static rtx
expand_builtin_sincos (tree
);
111 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
112 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
113 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
114 static rtx
expand_builtin_next_arg (void);
115 static rtx
expand_builtin_va_start (tree
);
116 static rtx
expand_builtin_va_end (tree
);
117 static rtx
expand_builtin_va_copy (tree
);
118 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_strcmp (tree
, rtx
);
120 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
121 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
122 static rtx
expand_builtin_memcpy (tree
, rtx
);
123 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
124 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
125 enum machine_mode
, int);
126 static rtx
expand_builtin_strcpy (tree
, rtx
);
127 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
128 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
129 static rtx
expand_builtin_strncpy (tree
, rtx
);
130 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
131 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
132 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
133 static rtx
expand_builtin_bzero (tree
);
134 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
135 static rtx
expand_builtin_alloca (tree
, rtx
);
136 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
137 static rtx
expand_builtin_frame_address (tree
, tree
);
138 static tree
stabilize_va_list_loc (location_t
, tree
, int);
139 static rtx
expand_builtin_expect (tree
, rtx
);
140 static tree
fold_builtin_constant_p (tree
);
141 static tree
fold_builtin_expect (location_t
, tree
, tree
);
142 static tree
fold_builtin_classify_type (tree
);
143 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
144 static tree
fold_builtin_inf (location_t
, tree
, int);
145 static tree
fold_builtin_nan (tree
, tree
, int);
146 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
147 static bool validate_arg (const_tree
, enum tree_code code
);
148 static bool integer_valued_real_p (tree
);
149 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
150 static bool readonly_data_expr (tree
);
151 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
152 static rtx
expand_builtin_signbit (tree
, rtx
);
153 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
154 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
155 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
156 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
157 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
158 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
159 static tree
fold_builtin_tan (tree
, tree
);
160 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
161 static tree
fold_builtin_floor (location_t
, tree
, tree
);
162 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
163 static tree
fold_builtin_round (location_t
, tree
, tree
);
164 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
165 static tree
fold_builtin_bitop (tree
, tree
);
166 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
167 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
168 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
169 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
170 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
171 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
172 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
173 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
174 static tree
fold_builtin_isascii (location_t
, tree
);
175 static tree
fold_builtin_toascii (location_t
, tree
);
176 static tree
fold_builtin_isdigit (location_t
, tree
);
177 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
178 static tree
fold_builtin_abs (location_t
, tree
, tree
);
179 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
181 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
182 static tree
fold_builtin_0 (location_t
, tree
, bool);
183 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
184 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
185 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
186 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
187 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
189 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
190 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
191 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
192 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
193 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
194 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
195 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
196 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
198 static rtx
expand_builtin_object_size (tree
);
199 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
200 enum built_in_function
);
201 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
202 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
203 static void maybe_emit_free_warning (tree
);
204 static tree
fold_builtin_object_size (tree
, tree
);
205 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
206 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
207 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
208 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
209 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
210 enum built_in_function
);
211 static bool init_target_chars (void);
213 static unsigned HOST_WIDE_INT target_newline
;
214 static unsigned HOST_WIDE_INT target_percent
;
215 static unsigned HOST_WIDE_INT target_c
;
216 static unsigned HOST_WIDE_INT target_s
;
217 static char target_percent_c
[3];
218 static char target_percent_s
[3];
219 static char target_percent_s_newline
[4];
220 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
221 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
222 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
223 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
224 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
225 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
226 static tree
do_mpfr_sincos (tree
, tree
, tree
);
227 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
228 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
229 const REAL_VALUE_TYPE
*, bool);
230 static tree
do_mpfr_remquo (tree
, tree
, tree
);
231 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
233 /* Return true if NAME starts with __builtin_ or __sync_. */
236 is_builtin_name (const char *name
)
238 if (strncmp (name
, "__builtin_", 10) == 0)
240 if (strncmp (name
, "__sync_", 7) == 0)
246 /* Return true if DECL is a function symbol representing a built-in. */
249 is_builtin_fn (tree decl
)
251 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
260 called_as_built_in (tree node
)
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
265 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
266 return is_builtin_name (name
);
269 /* Return the alignment in bits of EXP, an object.
270 Don't return more than MAX_ALIGN no matter what. */
273 get_object_alignment (tree exp
, unsigned int max_align
)
275 HOST_WIDE_INT bitsize
, bitpos
;
277 enum machine_mode mode
;
278 int unsignedp
, volatilep
;
279 unsigned int align
, inner
;
281 /* Get the innermost object and the constant (bitpos) and possibly
282 variable (offset) offset of the access. */
283 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
284 &mode
, &unsignedp
, &volatilep
, true);
286 /* Extract alignment information from the innermost object and
287 possibly adjust bitpos and offset. */
288 if (TREE_CODE (exp
) == CONST_DECL
)
289 exp
= DECL_INITIAL (exp
);
291 && TREE_CODE (exp
) != LABEL_DECL
)
292 align
= DECL_ALIGN (exp
);
293 else if (CONSTANT_CLASS_P (exp
))
295 align
= TYPE_ALIGN (TREE_TYPE (exp
));
296 #ifdef CONSTANT_ALIGNMENT
297 align
= (unsigned)CONSTANT_ALIGNMENT (exp
, align
);
300 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
301 align
= TYPE_ALIGN (TREE_TYPE (exp
));
302 else if (TREE_CODE (exp
) == INDIRECT_REF
)
303 align
= TYPE_ALIGN (TREE_TYPE (exp
));
304 else if (TREE_CODE (exp
) == MISALIGNED_INDIRECT_REF
)
306 tree op1
= TREE_OPERAND (exp
, 1);
307 align
= integer_zerop (op1
) ? BITS_PER_UNIT
: TREE_INT_CST_LOW (op1
);
309 else if (TREE_CODE (exp
) == MEM_REF
)
311 tree addr
= TREE_OPERAND (exp
, 0);
312 struct ptr_info_def
*pi
;
313 if (TREE_CODE (addr
) == BIT_AND_EXPR
314 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
316 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
317 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
318 align
*= BITS_PER_UNIT
;
319 addr
= TREE_OPERAND (addr
, 0);
322 align
= BITS_PER_UNIT
;
323 if (TREE_CODE (addr
) == SSA_NAME
324 && (pi
= SSA_NAME_PTR_INFO (addr
)))
326 bitpos
+= (pi
->misalign
* BITS_PER_UNIT
) & ~(align
- 1);
327 align
= MAX (pi
->align
* BITS_PER_UNIT
, align
);
329 else if (TREE_CODE (addr
) == ADDR_EXPR
)
330 align
= MAX (align
, get_object_alignment (TREE_OPERAND (addr
, 0),
332 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
334 else if (TREE_CODE (exp
) == TARGET_MEM_REF
336 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp
))))
338 struct ptr_info_def
*pi
;
339 tree addr
= TMR_BASE (exp
);
340 if (TREE_CODE (addr
) == BIT_AND_EXPR
341 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
343 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
344 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
345 align
*= BITS_PER_UNIT
;
346 addr
= TREE_OPERAND (addr
, 0);
349 align
= BITS_PER_UNIT
;
350 if (TREE_CODE (addr
) == SSA_NAME
351 && (pi
= SSA_NAME_PTR_INFO (addr
)))
353 bitpos
+= (pi
->misalign
* BITS_PER_UNIT
) & ~(align
- 1);
354 align
= MAX (pi
->align
* BITS_PER_UNIT
, align
);
356 else if (TREE_CODE (addr
) == ADDR_EXPR
)
357 align
= MAX (align
, get_object_alignment (TREE_OPERAND (addr
, 0),
359 if (TMR_OFFSET (exp
))
360 bitpos
+= TREE_INT_CST_LOW (TMR_OFFSET (exp
)) * BITS_PER_UNIT
;
361 if (TMR_INDEX (exp
) && TMR_STEP (exp
))
363 unsigned HOST_WIDE_INT step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
364 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
366 else if (TMR_INDEX (exp
))
367 align
= BITS_PER_UNIT
;
369 else if (TREE_CODE (exp
) == TARGET_MEM_REF
372 align
= get_object_alignment (TMR_SYMBOL (exp
), max_align
);
373 if (TMR_OFFSET (exp
))
374 bitpos
+= TREE_INT_CST_LOW (TMR_OFFSET (exp
)) * BITS_PER_UNIT
;
375 if (TMR_INDEX (exp
) && TMR_STEP (exp
))
377 unsigned HOST_WIDE_INT step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
378 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
380 else if (TMR_INDEX (exp
))
381 align
= BITS_PER_UNIT
;
384 align
= BITS_PER_UNIT
;
386 /* If there is a non-constant offset part extract the maximum
387 alignment that can prevail. */
393 if (TREE_CODE (offset
) == PLUS_EXPR
)
395 next_offset
= TREE_OPERAND (offset
, 0);
396 offset
= TREE_OPERAND (offset
, 1);
400 if (host_integerp (offset
, 1))
402 /* Any overflow in calculating offset_bits won't change
405 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
408 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
410 else if (TREE_CODE (offset
) == MULT_EXPR
411 && host_integerp (TREE_OPERAND (offset
, 1), 1))
413 /* Any overflow in calculating offset_factor won't change
415 unsigned offset_factor
416 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
420 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
424 inner
= MIN (inner
, BITS_PER_UNIT
);
427 offset
= next_offset
;
430 /* Alignment is innermost object alignment adjusted by the constant
431 and non-constant offset parts. */
432 align
= MIN (align
, inner
);
433 bitpos
= bitpos
& (align
- 1);
435 /* align and bitpos now specify known low bits of the pointer.
436 ptr & (align - 1) == bitpos. */
439 align
= (bitpos
& -bitpos
);
441 return MIN (align
, max_align
);
444 /* Returns true iff we can trust that alignment information has been
445 calculated properly. */
448 can_trust_pointer_alignment (void)
450 /* We rely on TER to compute accurate alignment information. */
451 return (optimize
&& flag_tree_ter
);
454 /* Return the alignment in bits of EXP, a pointer valued expression.
455 But don't return more than MAX_ALIGN no matter what.
456 The alignment returned is, by default, the alignment of the thing that
457 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
459 Otherwise, look at the expression to see if we can do better, i.e., if the
460 expression is actually pointing at an object whose alignment is tighter. */
463 get_pointer_alignment (tree exp
, unsigned int max_align
)
467 if (TREE_CODE (exp
) == ADDR_EXPR
)
468 return get_object_alignment (TREE_OPERAND (exp
, 0), max_align
);
469 else if (TREE_CODE (exp
) == SSA_NAME
470 && POINTER_TYPE_P (TREE_TYPE (exp
)))
472 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
475 return BITS_PER_UNIT
;
476 if (pi
->misalign
!= 0)
477 align
= (pi
->misalign
& -pi
->misalign
);
480 return MIN (max_align
, align
* BITS_PER_UNIT
);
483 return POINTER_TYPE_P (TREE_TYPE (exp
)) ? BITS_PER_UNIT
: 0;
486 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
487 way, because it could contain a zero byte in the middle.
488 TREE_STRING_LENGTH is the size of the character array, not the string.
490 ONLY_VALUE should be nonzero if the result is not going to be emitted
491 into the instruction stream and zero if it is going to be expanded.
492 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
493 is returned, otherwise NULL, since
494 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
495 evaluate the side-effects.
497 The value returned is of type `ssizetype'.
499 Unfortunately, string_constant can't access the values of const char
500 arrays with initializers, so neither can we do so here. */
503 c_strlen (tree src
, int only_value
)
506 HOST_WIDE_INT offset
;
512 if (TREE_CODE (src
) == COND_EXPR
513 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
517 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
518 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
519 if (tree_int_cst_equal (len1
, len2
))
523 if (TREE_CODE (src
) == COMPOUND_EXPR
524 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
525 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
527 if (EXPR_HAS_LOCATION (src
))
528 loc
= EXPR_LOCATION (src
);
530 loc
= input_location
;
532 src
= string_constant (src
, &offset_node
);
536 max
= TREE_STRING_LENGTH (src
) - 1;
537 ptr
= TREE_STRING_POINTER (src
);
539 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
541 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
542 compute the offset to the following null if we don't know where to
543 start searching for it. */
546 for (i
= 0; i
< max
; i
++)
550 /* We don't know the starting offset, but we do know that the string
551 has no internal zero bytes. We can assume that the offset falls
552 within the bounds of the string; otherwise, the programmer deserves
553 what he gets. Subtract the offset from the length of the string,
554 and return that. This would perhaps not be valid if we were dealing
555 with named arrays in addition to literal string constants. */
557 return size_diffop_loc (loc
, size_int (max
), offset_node
);
560 /* We have a known offset into the string. Start searching there for
561 a null character if we can represent it as a single HOST_WIDE_INT. */
562 if (offset_node
== 0)
564 else if (! host_integerp (offset_node
, 0))
567 offset
= tree_low_cst (offset_node
, 0);
569 /* If the offset is known to be out of bounds, warn, and call strlen at
571 if (offset
< 0 || offset
> max
)
573 /* Suppress multiple warnings for propagated constant strings. */
574 if (! TREE_NO_WARNING (src
))
576 warning_at (loc
, 0, "offset outside bounds of constant string");
577 TREE_NO_WARNING (src
) = 1;
582 /* Use strlen to search for the first zero byte. Since any strings
583 constructed with build_string will have nulls appended, we win even
584 if we get handed something like (char[4])"abcd".
586 Since OFFSET is our starting index into the string, no further
587 calculation is needed. */
588 return ssize_int (strlen (ptr
+ offset
));
591 /* Return a char pointer for a C string if it is a string constant
592 or sum of string constant and integer constant. */
599 src
= string_constant (src
, &offset_node
);
603 if (offset_node
== 0)
604 return TREE_STRING_POINTER (src
);
605 else if (!host_integerp (offset_node
, 1)
606 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
609 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
612 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
613 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
616 c_readstr (const char *str
, enum machine_mode mode
)
622 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
627 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
630 if (WORDS_BIG_ENDIAN
)
631 j
= GET_MODE_SIZE (mode
) - i
- 1;
632 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
633 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
634 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
636 gcc_assert (j
< 2 * HOST_BITS_PER_WIDE_INT
);
639 ch
= (unsigned char) str
[i
];
640 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
642 return immed_double_const (c
[0], c
[1], mode
);
645 /* Cast a target constant CST to target CHAR and if that value fits into
646 host char type, return zero and put that value into variable pointed to by
650 target_char_cast (tree cst
, char *p
)
652 unsigned HOST_WIDE_INT val
, hostval
;
654 if (!host_integerp (cst
, 1)
655 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
658 val
= tree_low_cst (cst
, 1);
659 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
660 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
663 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
664 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
673 /* Similar to save_expr, but assumes that arbitrary code is not executed
674 in between the multiple evaluations. In particular, we assume that a
675 non-addressable local variable will not be modified. */
678 builtin_save_expr (tree exp
)
680 if (TREE_ADDRESSABLE (exp
) == 0
681 && (TREE_CODE (exp
) == PARM_DECL
682 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
685 return save_expr (exp
);
688 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
689 times to get the address of either a higher stack frame, or a return
690 address located within it (depending on FNDECL_CODE). */
693 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
697 #ifdef INITIAL_FRAME_ADDRESS_RTX
698 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
702 /* For a zero count with __builtin_return_address, we don't care what
703 frame address we return, because target-specific definitions will
704 override us. Therefore frame pointer elimination is OK, and using
705 the soft frame pointer is OK.
707 For a nonzero count, or a zero count with __builtin_frame_address,
708 we require a stable offset from the current frame pointer to the
709 previous one, so we must use the hard frame pointer, and
710 we must disable frame pointer elimination. */
711 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
712 tem
= frame_pointer_rtx
;
715 tem
= hard_frame_pointer_rtx
;
717 /* Tell reload not to eliminate the frame pointer. */
718 crtl
->accesses_prior_frames
= 1;
722 /* Some machines need special handling before we can access
723 arbitrary frames. For example, on the SPARC, we must first flush
724 all register windows to the stack. */
725 #ifdef SETUP_FRAME_ADDRESSES
727 SETUP_FRAME_ADDRESSES ();
730 /* On the SPARC, the return address is not in the frame, it is in a
731 register. There is no way to access it off of the current frame
732 pointer, but it can be accessed off the previous frame pointer by
733 reading the value from the register window save area. */
734 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
735 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
739 /* Scan back COUNT frames to the specified frame. */
740 for (i
= 0; i
< count
; i
++)
742 /* Assume the dynamic chain pointer is in the word that the
743 frame address points to, unless otherwise specified. */
744 #ifdef DYNAMIC_CHAIN_ADDRESS
745 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
747 tem
= memory_address (Pmode
, tem
);
748 tem
= gen_frame_mem (Pmode
, tem
);
749 tem
= copy_to_reg (tem
);
752 /* For __builtin_frame_address, return what we've got. But, on
753 the SPARC for example, we may have to add a bias. */
754 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
755 #ifdef FRAME_ADDR_RTX
756 return FRAME_ADDR_RTX (tem
);
761 /* For __builtin_return_address, get the return address from that frame. */
762 #ifdef RETURN_ADDR_RTX
763 tem
= RETURN_ADDR_RTX (count
, tem
);
765 tem
= memory_address (Pmode
,
766 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
767 tem
= gen_frame_mem (Pmode
, tem
);
772 /* Alias set used for setjmp buffer. */
773 static alias_set_type setjmp_alias_set
= -1;
775 /* Construct the leading half of a __builtin_setjmp call. Control will
776 return to RECEIVER_LABEL. This is also called directly by the SJLJ
777 exception handling code. */
780 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
782 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
786 if (setjmp_alias_set
== -1)
787 setjmp_alias_set
= new_alias_set ();
789 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
791 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
793 /* We store the frame pointer and the address of receiver_label in
794 the buffer and use the rest of it for the stack save area, which
795 is machine-dependent. */
797 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
798 set_mem_alias_set (mem
, setjmp_alias_set
);
799 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
801 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
802 set_mem_alias_set (mem
, setjmp_alias_set
);
804 emit_move_insn (validize_mem (mem
),
805 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
807 stack_save
= gen_rtx_MEM (sa_mode
,
808 plus_constant (buf_addr
,
809 2 * GET_MODE_SIZE (Pmode
)));
810 set_mem_alias_set (stack_save
, setjmp_alias_set
);
811 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
813 /* If there is further processing to do, do it. */
814 #ifdef HAVE_builtin_setjmp_setup
815 if (HAVE_builtin_setjmp_setup
)
816 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
819 /* Tell optimize_save_area_alloca that extra work is going to
820 need to go on during alloca. */
821 cfun
->calls_setjmp
= 1;
823 /* We have a nonlocal label. */
824 cfun
->has_nonlocal_label
= 1;
827 /* Construct the trailing part of a __builtin_setjmp call. This is
828 also called directly by the SJLJ exception handling code. */
831 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
835 /* Clobber the FP when we get here, so we have to make sure it's
836 marked as used by this function. */
837 emit_use (hard_frame_pointer_rtx
);
839 /* Mark the static chain as clobbered here so life information
840 doesn't get messed up for it. */
841 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
842 if (chain
&& REG_P (chain
))
843 emit_clobber (chain
);
845 /* Now put in the code to restore the frame pointer, and argument
846 pointer, if needed. */
847 #ifdef HAVE_nonlocal_goto
848 if (! HAVE_nonlocal_goto
)
851 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
852 /* This might change the hard frame pointer in ways that aren't
853 apparent to early optimization passes, so force a clobber. */
854 emit_clobber (hard_frame_pointer_rtx
);
857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
858 if (fixed_regs
[ARG_POINTER_REGNUM
])
860 #ifdef ELIMINABLE_REGS
862 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
864 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
865 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
866 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
869 if (i
== ARRAY_SIZE (elim_regs
))
872 /* Now restore our arg pointer from the address at which it
873 was saved in our stack frame. */
874 emit_move_insn (crtl
->args
.internal_arg_pointer
,
875 copy_to_reg (get_arg_pointer_save_area ()));
880 #ifdef HAVE_builtin_setjmp_receiver
881 if (HAVE_builtin_setjmp_receiver
)
882 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
885 #ifdef HAVE_nonlocal_goto_receiver
886 if (HAVE_nonlocal_goto_receiver
)
887 emit_insn (gen_nonlocal_goto_receiver ());
892 /* We must not allow the code we just generated to be reordered by
893 scheduling. Specifically, the update of the frame pointer must
894 happen immediately, not later. */
895 emit_insn (gen_blockage ());
898 /* __builtin_longjmp is passed a pointer to an array of five words (not
899 all will be used on all machines). It operates similarly to the C
900 library function of the same name, but is more efficient. Much of
901 the code below is copied from the handling of non-local gotos. */
904 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
906 rtx fp
, lab
, stack
, insn
, last
;
907 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
909 /* DRAP is needed for stack realign if longjmp is expanded to current
911 if (SUPPORTS_STACK_ALIGNMENT
)
912 crtl
->need_drap
= true;
914 if (setjmp_alias_set
== -1)
915 setjmp_alias_set
= new_alias_set ();
917 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
919 buf_addr
= force_reg (Pmode
, buf_addr
);
921 /* We require that the user must pass a second argument of 1, because
922 that is what builtin_setjmp will return. */
923 gcc_assert (value
== const1_rtx
);
925 last
= get_last_insn ();
926 #ifdef HAVE_builtin_longjmp
927 if (HAVE_builtin_longjmp
)
928 emit_insn (gen_builtin_longjmp (buf_addr
));
932 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
933 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
934 GET_MODE_SIZE (Pmode
)));
936 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
937 2 * GET_MODE_SIZE (Pmode
)));
938 set_mem_alias_set (fp
, setjmp_alias_set
);
939 set_mem_alias_set (lab
, setjmp_alias_set
);
940 set_mem_alias_set (stack
, setjmp_alias_set
);
942 /* Pick up FP, label, and SP from the block and jump. This code is
943 from expand_goto in stmt.c; see there for detailed comments. */
944 #ifdef HAVE_nonlocal_goto
945 if (HAVE_nonlocal_goto
)
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
953 lab
= copy_to_reg (lab
);
955 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
956 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
958 emit_move_insn (hard_frame_pointer_rtx
, fp
);
959 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
961 emit_use (hard_frame_pointer_rtx
);
962 emit_use (stack_pointer_rtx
);
963 emit_indirect_jump (lab
);
967 /* Search backwards and mark the jump insn as a non-local goto.
968 Note that this precludes the use of __builtin_longjmp to a
969 __builtin_setjmp target in the same function. However, we've
970 already cautioned the user that these functions are for
971 internal exception handling use only. */
972 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
974 gcc_assert (insn
!= last
);
978 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
981 else if (CALL_P (insn
))
986 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
987 and the address of the save area. */
990 expand_builtin_nonlocal_goto (tree exp
)
992 tree t_label
, t_save_area
;
993 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
995 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
998 t_label
= CALL_EXPR_ARG (exp
, 0);
999 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1001 r_label
= expand_normal (t_label
);
1002 r_label
= convert_memory_address (Pmode
, r_label
);
1003 r_save_area
= expand_normal (t_save_area
);
1004 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1005 /* Copy the address of the save location to a register just in case it was based
1006 on the frame pointer. */
1007 r_save_area
= copy_to_reg (r_save_area
);
1008 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1009 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1010 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
1012 crtl
->has_nonlocal_goto
= 1;
1014 #ifdef HAVE_nonlocal_goto
1015 /* ??? We no longer need to pass the static chain value, afaik. */
1016 if (HAVE_nonlocal_goto
)
1017 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1021 r_label
= copy_to_reg (r_label
);
1023 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1024 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1026 /* Restore frame pointer for containing function.
1027 This sets the actual hard register used for the frame pointer
1028 to the location of the function's incoming static chain info.
1029 The non-local goto handler will then adjust it to contain the
1030 proper value and reload the argument pointer, if needed. */
1031 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1032 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
1034 /* USE of hard_frame_pointer_rtx added for consistency;
1035 not clear if really needed. */
1036 emit_use (hard_frame_pointer_rtx
);
1037 emit_use (stack_pointer_rtx
);
1039 /* If the architecture is using a GP register, we must
1040 conservatively assume that the target function makes use of it.
1041 The prologue of functions with nonlocal gotos must therefore
1042 initialize the GP register to the appropriate value, and we
1043 must then make sure that this value is live at the point
1044 of the jump. (Note that this doesn't necessarily apply
1045 to targets with a nonlocal_goto pattern; they are free
1046 to implement it in their own way. Note also that this is
1047 a no-op if the GP register is a global invariant.) */
1048 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1049 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1050 emit_use (pic_offset_table_rtx
);
1052 emit_indirect_jump (r_label
);
1055 /* Search backwards to the jump insn and mark it as a
1057 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1061 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1064 else if (CALL_P (insn
))
1071 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1072 (not all will be used on all machines) that was passed to __builtin_setjmp.
1073 It updates the stack pointer in that block to correspond to the current
1077 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1079 enum machine_mode sa_mode
= Pmode
;
1083 #ifdef HAVE_save_stack_nonlocal
1084 if (HAVE_save_stack_nonlocal
)
1085 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
1087 #ifdef STACK_SAVEAREA_MODE
1088 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1092 = gen_rtx_MEM (sa_mode
,
1095 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
1099 emit_insn (gen_setjmp ());
1102 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
1105 /* Expand a call to __builtin_prefetch. For a target that does not support
1106 data prefetch, evaluate the memory address argument in case it has side
1110 expand_builtin_prefetch (tree exp
)
1112 tree arg0
, arg1
, arg2
;
1116 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1119 arg0
= CALL_EXPR_ARG (exp
, 0);
1121 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1122 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1124 nargs
= call_expr_nargs (exp
);
1126 arg1
= CALL_EXPR_ARG (exp
, 1);
1128 arg1
= integer_zero_node
;
1130 arg2
= CALL_EXPR_ARG (exp
, 2);
1132 arg2
= integer_three_node
;
1134 /* Argument 0 is an address. */
1135 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1137 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1138 if (TREE_CODE (arg1
) != INTEGER_CST
)
1140 error ("second argument to %<__builtin_prefetch%> must be a constant");
1141 arg1
= integer_zero_node
;
1143 op1
= expand_normal (arg1
);
1144 /* Argument 1 must be either zero or one. */
1145 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1147 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1152 /* Argument 2 (locality) must be a compile-time constant int. */
1153 if (TREE_CODE (arg2
) != INTEGER_CST
)
1155 error ("third argument to %<__builtin_prefetch%> must be a constant");
1156 arg2
= integer_zero_node
;
1158 op2
= expand_normal (arg2
);
1159 /* Argument 2 must be 0, 1, 2, or 3. */
1160 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1162 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1166 #ifdef HAVE_prefetch
1169 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1171 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1172 || (GET_MODE (op0
) != Pmode
))
1174 op0
= convert_memory_address (Pmode
, op0
);
1175 op0
= force_reg (Pmode
, op0
);
1177 emit_insn (gen_prefetch (op0
, op1
, op2
));
1181 /* Don't do anything with direct references to volatile memory, but
1182 generate code to handle other side effects. */
1183 if (!MEM_P (op0
) && side_effects_p (op0
))
1187 /* Get a MEM rtx for expression EXP which is the address of an operand
1188 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1189 the maximum length of the block of memory that might be accessed or
1193 get_memory_rtx (tree exp
, tree len
)
1195 tree orig_exp
= exp
;
1199 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1200 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1201 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1202 exp
= TREE_OPERAND (exp
, 0);
1204 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1205 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1207 /* Get an expression we can use to find the attributes to assign to MEM.
1208 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1209 we can. First remove any nops. */
1210 while (CONVERT_EXPR_P (exp
)
1211 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1212 exp
= TREE_OPERAND (exp
, 0);
1215 if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
1216 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1217 && host_integerp (TREE_OPERAND (exp
, 1), 0)
1218 && (off
= tree_low_cst (TREE_OPERAND (exp
, 1), 0)) > 0)
1219 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
1220 else if (TREE_CODE (exp
) == ADDR_EXPR
)
1221 exp
= TREE_OPERAND (exp
, 0);
1222 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1223 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1227 /* Honor attributes derived from exp, except for the alias set
1228 (as builtin stringops may alias with anything) and the size
1229 (as stringops may access multiple array elements). */
1232 set_mem_attributes (mem
, exp
, 0);
1235 mem
= adjust_automodify_address_nv (mem
, BLKmode
, NULL
, off
);
1237 /* Allow the string and memory builtins to overflow from one
1238 field into another, see http://gcc.gnu.org/PR23561.
1239 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1240 memory accessed by the string or memory builtin will fit
1241 within the field. */
1242 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1244 tree mem_expr
= MEM_EXPR (mem
);
1245 HOST_WIDE_INT offset
= -1, length
= -1;
1248 while (TREE_CODE (inner
) == ARRAY_REF
1249 || CONVERT_EXPR_P (inner
)
1250 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1251 || TREE_CODE (inner
) == SAVE_EXPR
)
1252 inner
= TREE_OPERAND (inner
, 0);
1254 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1256 if (MEM_OFFSET (mem
)
1257 && CONST_INT_P (MEM_OFFSET (mem
)))
1258 offset
= INTVAL (MEM_OFFSET (mem
));
1260 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1261 length
= tree_low_cst (len
, 0);
1263 while (TREE_CODE (inner
) == COMPONENT_REF
)
1265 tree field
= TREE_OPERAND (inner
, 1);
1266 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1267 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1269 /* Bitfields are generally not byte-addressable. */
1270 gcc_assert (!DECL_BIT_FIELD (field
)
1271 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1272 % BITS_PER_UNIT
) == 0
1273 && host_integerp (DECL_SIZE (field
), 0)
1274 && (TREE_INT_CST_LOW (DECL_SIZE (field
))
1275 % BITS_PER_UNIT
) == 0));
1277 /* If we can prove that the memory starting at XEXP (mem, 0) and
1278 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1279 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1280 fields without DECL_SIZE_UNIT like flexible array members. */
1282 && DECL_SIZE_UNIT (field
)
1283 && host_integerp (DECL_SIZE_UNIT (field
), 0))
1286 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field
));
1289 && offset
+ length
<= size
)
1294 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1295 offset
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
1296 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1304 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1305 inner
= TREE_OPERAND (inner
, 0);
1308 if (mem_expr
== NULL
)
1310 if (mem_expr
!= MEM_EXPR (mem
))
1312 set_mem_expr (mem
, mem_expr
);
1313 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1316 set_mem_alias_set (mem
, 0);
1317 set_mem_size (mem
, NULL_RTX
);
/* Built-in functions to perform an untyped call and return.  */

/* Per-target register-mode tables, kept in the target-specific
   builtins state so they can be swapped with the target.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1330 /* Return the size required for the block returned by __builtin_apply_args,
1331 and initialize apply_args_mode. */
1334 apply_args_size (void)
1336 static int size
= -1;
1339 enum machine_mode mode
;
1341 /* The values computed by this function never change. */
1344 /* The first value is the incoming arg-pointer. */
1345 size
= GET_MODE_SIZE (Pmode
);
1347 /* The second value is the structure value address unless this is
1348 passed as an "invisible" first argument. */
1349 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1350 size
+= GET_MODE_SIZE (Pmode
);
1352 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1353 if (FUNCTION_ARG_REGNO_P (regno
))
1355 mode
= reg_raw_mode
[regno
];
1357 gcc_assert (mode
!= VOIDmode
);
1359 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1360 if (size
% align
!= 0)
1361 size
= CEIL (size
, align
) * align
;
1362 size
+= GET_MODE_SIZE (mode
);
1363 apply_args_mode
[regno
] = mode
;
1367 apply_args_mode
[regno
] = VOIDmode
;
1373 /* Return the size required for the block returned by __builtin_apply,
1374 and initialize apply_result_mode. */
1377 apply_result_size (void)
1379 static int size
= -1;
1381 enum machine_mode mode
;
1383 /* The values computed by this function never change. */
1388 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1389 if (targetm
.calls
.function_value_regno_p (regno
))
1391 mode
= reg_raw_mode
[regno
];
1393 gcc_assert (mode
!= VOIDmode
);
1395 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1396 if (size
% align
!= 0)
1397 size
= CEIL (size
, align
) * align
;
1398 size
+= GET_MODE_SIZE (mode
);
1399 apply_result_mode
[regno
] = mode
;
1402 apply_result_mode
[regno
] = VOIDmode
;
1404 /* Allow targets that use untyped_call and untyped_return to override
1405 the size so that machine-specific information can be stored here. */
1406 #ifdef APPLY_RESULT_SIZE
1407 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1444 /* Save the state required to perform an untyped call with the same
1445 arguments as were passed to the current function. */
1448 expand_builtin_apply_args_1 (void)
1451 int size
, align
, regno
;
1452 enum machine_mode mode
;
1453 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1455 /* Create a block where the arg-pointer, structure value address,
1456 and argument registers can be saved. */
1457 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1459 /* Walk past the arg-pointer and structure value address. */
1460 size
= GET_MODE_SIZE (Pmode
);
1461 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1462 size
+= GET_MODE_SIZE (Pmode
);
1464 /* Save each register used in calling a function to the block. */
1465 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1466 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1468 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1469 if (size
% align
!= 0)
1470 size
= CEIL (size
, align
) * align
;
1472 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1474 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1475 size
+= GET_MODE_SIZE (mode
);
1478 /* Save the arg pointer to the block. */
1479 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1480 #ifdef STACK_GROWS_DOWNWARD
1481 /* We need the pointer as the caller actually passed them to us, not
1482 as we might have pretended they were passed. Make sure it's a valid
1483 operand, as emit_move_insn isn't expected to handle a PLUS. */
1485 = force_operand (plus_constant (tem
, crtl
->args
.pretend_args_size
),
1488 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1490 size
= GET_MODE_SIZE (Pmode
);
1492 /* Save the structure value address unless this is passed as an
1493 "invisible" first argument. */
1494 if (struct_incoming_value
)
1496 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1497 copy_to_reg (struct_incoming_value
));
1498 size
+= GET_MODE_SIZE (Pmode
);
1501 /* Return the address of the block. */
1502 return copy_addr_to_reg (XEXP (registers
, 0));
1505 /* __builtin_apply_args returns block of memory allocated on
1506 the stack into which is stored the arg pointer, structure
1507 value address, static chain, and all the registers that might
1508 possibly be used in performing a function call. The code is
1509 moved to the start of the function so the incoming values are
1513 expand_builtin_apply_args (void)
1515 /* Don't do __builtin_apply_args more than once in a function.
1516 Save the result of the first call and reuse it. */
1517 if (apply_args_value
!= 0)
1518 return apply_args_value
;
1520 /* When this function is called, it means that registers must be
1521 saved on entry to this function. So we migrate the
1522 call to the first insn of this function. */
1527 temp
= expand_builtin_apply_args_1 ();
1531 apply_args_value
= temp
;
1533 /* Put the insns after the NOTE that starts the function.
1534 If this is inside a start_sequence, make the outer-level insn
1535 chain current, so the code is placed at the start of the
1536 function. If internal_arg_pointer is a non-virtual pseudo,
1537 it needs to be placed after the function that initializes
1539 push_topmost_sequence ();
1540 if (REG_P (crtl
->args
.internal_arg_pointer
)
1541 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1542 emit_insn_before (seq
, parm_birth_insn
);
1544 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1545 pop_topmost_sequence ();
1550 /* Perform an untyped call and save the state required to perform an
1551 untyped return of whatever value was returned by the given function. */
1554 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1556 int size
, align
, regno
;
1557 enum machine_mode mode
;
1558 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1559 rtx old_stack_level
= 0;
1560 rtx call_fusage
= 0;
1561 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1563 arguments
= convert_memory_address (Pmode
, arguments
);
1565 /* Create a block where the return registers can be saved. */
1566 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1568 /* Fetch the arg pointer from the ARGUMENTS block. */
1569 incoming_args
= gen_reg_rtx (Pmode
);
1570 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1571 #ifndef STACK_GROWS_DOWNWARD
1572 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1573 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1576 /* Push a new argument block and copy the arguments. Do not allow
1577 the (potential) memcpy call below to interfere with our stack
1579 do_pending_stack_adjust ();
1582 /* Save the stack with nonlocal if available. */
1583 #ifdef HAVE_save_stack_nonlocal
1584 if (HAVE_save_stack_nonlocal
)
1585 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1588 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. */
1592 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1594 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1595 may have already set current_function_calls_alloca to true.
1596 current_function_calls_alloca won't be set if argsize is zero,
1597 so we have to guarantee need_drap is true here. */
1598 if (SUPPORTS_STACK_ALIGNMENT
)
1599 crtl
->need_drap
= true;
1601 dest
= virtual_outgoing_args_rtx
;
1602 #ifndef STACK_GROWS_DOWNWARD
1603 if (CONST_INT_P (argsize
))
1604 dest
= plus_constant (dest
, -INTVAL (argsize
));
1606 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1608 dest
= gen_rtx_MEM (BLKmode
, dest
);
1609 set_mem_align (dest
, PARM_BOUNDARY
);
1610 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1611 set_mem_align (src
, PARM_BOUNDARY
);
1612 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1614 /* Refer to the argument block. */
1616 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1617 set_mem_align (arguments
, PARM_BOUNDARY
);
1619 /* Walk past the arg-pointer and structure value address. */
1620 size
= GET_MODE_SIZE (Pmode
);
1622 size
+= GET_MODE_SIZE (Pmode
);
1624 /* Restore each of the registers previously saved. Make USE insns
1625 for each of these registers for use in making the call. */
1626 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1627 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1629 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1630 if (size
% align
!= 0)
1631 size
= CEIL (size
, align
) * align
;
1632 reg
= gen_rtx_REG (mode
, regno
);
1633 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1634 use_reg (&call_fusage
, reg
);
1635 size
+= GET_MODE_SIZE (mode
);
1638 /* Restore the structure value address unless this is passed as an
1639 "invisible" first argument. */
1640 size
= GET_MODE_SIZE (Pmode
);
1643 rtx value
= gen_reg_rtx (Pmode
);
1644 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1645 emit_move_insn (struct_value
, value
);
1646 if (REG_P (struct_value
))
1647 use_reg (&call_fusage
, struct_value
);
1648 size
+= GET_MODE_SIZE (Pmode
);
1651 /* All arguments and registers used for the call are set up by now! */
1652 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1654 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1655 and we don't want to load it into a register as an optimization,
1656 because prepare_call_address already did it if it should be done. */
1657 if (GET_CODE (function
) != SYMBOL_REF
)
1658 function
= memory_address (FUNCTION_MODE
, function
);
1660 /* Generate the actual call instruction and save the return value. */
1661 #ifdef HAVE_untyped_call
1662 if (HAVE_untyped_call
)
1663 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1664 result
, result_vector (1, result
)));
1667 #ifdef HAVE_call_value
1668 if (HAVE_call_value
)
1672 /* Locate the unique return register. It is not possible to
1673 express a call that sets more than one return register using
1674 call_value; use untyped_call for that. In fact, untyped_call
1675 only needs to save the return registers in the given block. */
1676 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1677 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1679 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1681 valreg
= gen_rtx_REG (mode
, regno
);
1684 emit_call_insn (GEN_CALL_VALUE (valreg
,
1685 gen_rtx_MEM (FUNCTION_MODE
, function
),
1686 const0_rtx
, NULL_RTX
, const0_rtx
));
1688 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1694 /* Find the CALL insn we just emitted, and attach the register usage
1696 call_insn
= last_call_insn ();
1697 add_function_usage_to (call_insn
, call_fusage
);
1699 /* Restore the stack. */
1700 #ifdef HAVE_save_stack_nonlocal
1701 if (HAVE_save_stack_nonlocal
)
1702 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1705 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1709 /* Return the address of the result block. */
1710 result
= copy_addr_to_reg (XEXP (result
, 0));
1711 return convert_memory_address (ptr_mode
, result
);
1714 /* Perform an untyped return. */
1717 expand_builtin_return (rtx result
)
1719 int size
, align
, regno
;
1720 enum machine_mode mode
;
1722 rtx call_fusage
= 0;
1724 result
= convert_memory_address (Pmode
, result
);
1726 apply_result_size ();
1727 result
= gen_rtx_MEM (BLKmode
, result
);
1729 #ifdef HAVE_untyped_return
1730 if (HAVE_untyped_return
)
1732 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1738 /* Restore the return value and note that each value is used. */
1740 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1741 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1743 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1744 if (size
% align
!= 0)
1745 size
= CEIL (size
, align
) * align
;
1746 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1747 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1749 push_to_sequence (call_fusage
);
1751 call_fusage
= get_insns ();
1753 size
+= GET_MODE_SIZE (mode
);
1756 /* Put the USE insns before the return. */
1757 emit_insn (call_fusage
);
1759 /* Return whatever values was restored by jumping directly to the end
1761 expand_naked_return ();
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1766 static enum type_class
1767 type_to_class (tree type
)
1769 switch (TREE_CODE (type
))
1771 case VOID_TYPE
: return void_type_class
;
1772 case INTEGER_TYPE
: return integer_type_class
;
1773 case ENUMERAL_TYPE
: return enumeral_type_class
;
1774 case BOOLEAN_TYPE
: return boolean_type_class
;
1775 case POINTER_TYPE
: return pointer_type_class
;
1776 case REFERENCE_TYPE
: return reference_type_class
;
1777 case OFFSET_TYPE
: return offset_type_class
;
1778 case REAL_TYPE
: return real_type_class
;
1779 case COMPLEX_TYPE
: return complex_type_class
;
1780 case FUNCTION_TYPE
: return function_type_class
;
1781 case METHOD_TYPE
: return method_type_class
;
1782 case RECORD_TYPE
: return record_type_class
;
1784 case QUAL_UNION_TYPE
: return union_type_class
;
1785 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1786 ? string_type_class
: array_type_class
);
1787 case LANG_TYPE
: return lang_type_class
;
1788 default: return no_type_class
;
1792 /* Expand a call EXP to __builtin_classify_type. */
1795 expand_builtin_classify_type (tree exp
)
1797 if (call_expr_nargs (exp
))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1799 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1816 /* Return mathematic function equivalent to FN but operating directly
1817 on TYPE, if available. If IMPLICIT is true find the function in
1818 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1819 can't do the conversion, return zero. */
1822 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit
)
1824 tree
const *const fn_arr
1825 = implicit
? implicit_built_in_decls
: built_in_decls
;
1826 enum built_in_function fcode
, fcodef
, fcodel
;
1830 CASE_MATHFN (BUILT_IN_ACOS
)
1831 CASE_MATHFN (BUILT_IN_ACOSH
)
1832 CASE_MATHFN (BUILT_IN_ASIN
)
1833 CASE_MATHFN (BUILT_IN_ASINH
)
1834 CASE_MATHFN (BUILT_IN_ATAN
)
1835 CASE_MATHFN (BUILT_IN_ATAN2
)
1836 CASE_MATHFN (BUILT_IN_ATANH
)
1837 CASE_MATHFN (BUILT_IN_CBRT
)
1838 CASE_MATHFN (BUILT_IN_CEIL
)
1839 CASE_MATHFN (BUILT_IN_CEXPI
)
1840 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1841 CASE_MATHFN (BUILT_IN_COS
)
1842 CASE_MATHFN (BUILT_IN_COSH
)
1843 CASE_MATHFN (BUILT_IN_DREM
)
1844 CASE_MATHFN (BUILT_IN_ERF
)
1845 CASE_MATHFN (BUILT_IN_ERFC
)
1846 CASE_MATHFN (BUILT_IN_EXP
)
1847 CASE_MATHFN (BUILT_IN_EXP10
)
1848 CASE_MATHFN (BUILT_IN_EXP2
)
1849 CASE_MATHFN (BUILT_IN_EXPM1
)
1850 CASE_MATHFN (BUILT_IN_FABS
)
1851 CASE_MATHFN (BUILT_IN_FDIM
)
1852 CASE_MATHFN (BUILT_IN_FLOOR
)
1853 CASE_MATHFN (BUILT_IN_FMA
)
1854 CASE_MATHFN (BUILT_IN_FMAX
)
1855 CASE_MATHFN (BUILT_IN_FMIN
)
1856 CASE_MATHFN (BUILT_IN_FMOD
)
1857 CASE_MATHFN (BUILT_IN_FREXP
)
1858 CASE_MATHFN (BUILT_IN_GAMMA
)
1859 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1860 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1861 CASE_MATHFN (BUILT_IN_HYPOT
)
1862 CASE_MATHFN (BUILT_IN_ILOGB
)
1863 CASE_MATHFN (BUILT_IN_INF
)
1864 CASE_MATHFN (BUILT_IN_ISINF
)
1865 CASE_MATHFN (BUILT_IN_J0
)
1866 CASE_MATHFN (BUILT_IN_J1
)
1867 CASE_MATHFN (BUILT_IN_JN
)
1868 CASE_MATHFN (BUILT_IN_LCEIL
)
1869 CASE_MATHFN (BUILT_IN_LDEXP
)
1870 CASE_MATHFN (BUILT_IN_LFLOOR
)
1871 CASE_MATHFN (BUILT_IN_LGAMMA
)
1872 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1873 CASE_MATHFN (BUILT_IN_LLCEIL
)
1874 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1875 CASE_MATHFN (BUILT_IN_LLRINT
)
1876 CASE_MATHFN (BUILT_IN_LLROUND
)
1877 CASE_MATHFN (BUILT_IN_LOG
)
1878 CASE_MATHFN (BUILT_IN_LOG10
)
1879 CASE_MATHFN (BUILT_IN_LOG1P
)
1880 CASE_MATHFN (BUILT_IN_LOG2
)
1881 CASE_MATHFN (BUILT_IN_LOGB
)
1882 CASE_MATHFN (BUILT_IN_LRINT
)
1883 CASE_MATHFN (BUILT_IN_LROUND
)
1884 CASE_MATHFN (BUILT_IN_MODF
)
1885 CASE_MATHFN (BUILT_IN_NAN
)
1886 CASE_MATHFN (BUILT_IN_NANS
)
1887 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1888 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1889 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1890 CASE_MATHFN (BUILT_IN_POW
)
1891 CASE_MATHFN (BUILT_IN_POWI
)
1892 CASE_MATHFN (BUILT_IN_POW10
)
1893 CASE_MATHFN (BUILT_IN_REMAINDER
)
1894 CASE_MATHFN (BUILT_IN_REMQUO
)
1895 CASE_MATHFN (BUILT_IN_RINT
)
1896 CASE_MATHFN (BUILT_IN_ROUND
)
1897 CASE_MATHFN (BUILT_IN_SCALB
)
1898 CASE_MATHFN (BUILT_IN_SCALBLN
)
1899 CASE_MATHFN (BUILT_IN_SCALBN
)
1900 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1901 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1902 CASE_MATHFN (BUILT_IN_SIN
)
1903 CASE_MATHFN (BUILT_IN_SINCOS
)
1904 CASE_MATHFN (BUILT_IN_SINH
)
1905 CASE_MATHFN (BUILT_IN_SQRT
)
1906 CASE_MATHFN (BUILT_IN_TAN
)
1907 CASE_MATHFN (BUILT_IN_TANH
)
1908 CASE_MATHFN (BUILT_IN_TGAMMA
)
1909 CASE_MATHFN (BUILT_IN_TRUNC
)
1910 CASE_MATHFN (BUILT_IN_Y0
)
1911 CASE_MATHFN (BUILT_IN_Y1
)
1912 CASE_MATHFN (BUILT_IN_YN
)
1918 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1919 return fn_arr
[fcode
];
1920 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1921 return fn_arr
[fcodef
];
1922 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1923 return fn_arr
[fcodel
];
1928 /* Like mathfn_built_in_1(), but always use the implicit array. */
1931 mathfn_built_in (tree type
, enum built_in_function fn
)
1933 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1936 /* If errno must be maintained, expand the RTL to check if the result,
1937 TARGET, of a built-in function call, EXP, is NaN, and if so set
1941 expand_errno_check (tree exp
, rtx target
)
1943 rtx lab
= gen_label_rtx ();
1945 /* Test the result; if it is NaN, set errno=EDOM because
1946 the argument was not in the domain. */
1947 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1948 NULL_RTX
, NULL_RTX
, lab
,
1949 /* The jump is very likely. */
1950 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1953 /* If this built-in doesn't throw an exception, set errno directly. */
1954 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1956 #ifdef GEN_ERRNO_RTX
1957 rtx errno_rtx
= GEN_ERRNO_RTX
;
1960 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1962 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1968 /* Make sure the library call isn't expanded as a tail call. */
1969 CALL_EXPR_TAILCALL (exp
) = 0;
1971 /* We can't set errno=EDOM directly; let the library call do it.
1972 Pop the arguments right away in case the call gets deleted. */
1974 expand_call (exp
, target
, 0);
1979 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1980 Return NULL_RTX if a normal call should be emitted rather than expanding
1981 the function in-line. EXP is the expression that is a call to the builtin
1982 function; if convenient, the result should be placed in TARGET.
1983 SUBTARGET may be used as the target for computing one of EXP's operands. */
1986 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1988 optab builtin_optab
;
1990 tree fndecl
= get_callee_fndecl (exp
);
1991 enum machine_mode mode
;
1992 bool errno_set
= false;
1995 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1998 arg
= CALL_EXPR_ARG (exp
, 0);
2000 switch (DECL_FUNCTION_CODE (fndecl
))
2002 CASE_FLT_FN (BUILT_IN_SQRT
):
2003 errno_set
= ! tree_expr_nonnegative_p (arg
);
2004 builtin_optab
= sqrt_optab
;
2006 CASE_FLT_FN (BUILT_IN_EXP
):
2007 errno_set
= true; builtin_optab
= exp_optab
; break;
2008 CASE_FLT_FN (BUILT_IN_EXP10
):
2009 CASE_FLT_FN (BUILT_IN_POW10
):
2010 errno_set
= true; builtin_optab
= exp10_optab
; break;
2011 CASE_FLT_FN (BUILT_IN_EXP2
):
2012 errno_set
= true; builtin_optab
= exp2_optab
; break;
2013 CASE_FLT_FN (BUILT_IN_EXPM1
):
2014 errno_set
= true; builtin_optab
= expm1_optab
; break;
2015 CASE_FLT_FN (BUILT_IN_LOGB
):
2016 errno_set
= true; builtin_optab
= logb_optab
; break;
2017 CASE_FLT_FN (BUILT_IN_LOG
):
2018 errno_set
= true; builtin_optab
= log_optab
; break;
2019 CASE_FLT_FN (BUILT_IN_LOG10
):
2020 errno_set
= true; builtin_optab
= log10_optab
; break;
2021 CASE_FLT_FN (BUILT_IN_LOG2
):
2022 errno_set
= true; builtin_optab
= log2_optab
; break;
2023 CASE_FLT_FN (BUILT_IN_LOG1P
):
2024 errno_set
= true; builtin_optab
= log1p_optab
; break;
2025 CASE_FLT_FN (BUILT_IN_ASIN
):
2026 builtin_optab
= asin_optab
; break;
2027 CASE_FLT_FN (BUILT_IN_ACOS
):
2028 builtin_optab
= acos_optab
; break;
2029 CASE_FLT_FN (BUILT_IN_TAN
):
2030 builtin_optab
= tan_optab
; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN
):
2032 builtin_optab
= atan_optab
; break;
2033 CASE_FLT_FN (BUILT_IN_FLOOR
):
2034 builtin_optab
= floor_optab
; break;
2035 CASE_FLT_FN (BUILT_IN_CEIL
):
2036 builtin_optab
= ceil_optab
; break;
2037 CASE_FLT_FN (BUILT_IN_TRUNC
):
2038 builtin_optab
= btrunc_optab
; break;
2039 CASE_FLT_FN (BUILT_IN_ROUND
):
2040 builtin_optab
= round_optab
; break;
2041 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2042 builtin_optab
= nearbyint_optab
;
2043 if (flag_trapping_math
)
2045 /* Else fallthrough and expand as rint. */
2046 CASE_FLT_FN (BUILT_IN_RINT
):
2047 builtin_optab
= rint_optab
; break;
2048 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2049 builtin_optab
= significand_optab
; break;
2054 /* Make a suitable register to place result in. */
2055 mode
= TYPE_MODE (TREE_TYPE (exp
));
2057 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2060 /* Before working hard, check whether the instruction is available. */
2061 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2063 target
= gen_reg_rtx (mode
);
2065 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2066 need to expand the argument again. This way, we will not perform
2067 side-effects more the once. */
2068 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2070 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2074 /* Compute into TARGET.
2075 Set TARGET to wherever the result comes back. */
2076 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2081 expand_errno_check (exp
, target
);
2083 /* Output the entire sequence. */
2084 insns
= get_insns ();
2090 /* If we were unable to expand via the builtin, stop the sequence
2091 (without outputting the insns) and call to the library function
2092 with the stabilized argument list. */
2096 return expand_call (exp
, target
, target
== const0_rtx
);
2099 /* Expand a call to the builtin binary math functions (pow and atan2).
2100 Return NULL_RTX if a normal call should be emitted rather than expanding the
2101 function in-line. EXP is the expression that is a call to the builtin
2102 function; if convenient, the result should be placed in TARGET.
2103 SUBTARGET may be used as the target for computing one of EXP's
2107 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2109 optab builtin_optab
;
2110 rtx op0
, op1
, insns
;
2111 int op1_type
= REAL_TYPE
;
2112 tree fndecl
= get_callee_fndecl (exp
);
2114 enum machine_mode mode
;
2115 bool errno_set
= true;
2117 switch (DECL_FUNCTION_CODE (fndecl
))
2119 CASE_FLT_FN (BUILT_IN_SCALBN
):
2120 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2121 CASE_FLT_FN (BUILT_IN_LDEXP
):
2122 op1_type
= INTEGER_TYPE
;
2127 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2130 arg0
= CALL_EXPR_ARG (exp
, 0);
2131 arg1
= CALL_EXPR_ARG (exp
, 1);
2133 switch (DECL_FUNCTION_CODE (fndecl
))
2135 CASE_FLT_FN (BUILT_IN_POW
):
2136 builtin_optab
= pow_optab
; break;
2137 CASE_FLT_FN (BUILT_IN_ATAN2
):
2138 builtin_optab
= atan2_optab
; break;
2139 CASE_FLT_FN (BUILT_IN_SCALB
):
2140 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2142 builtin_optab
= scalb_optab
; break;
2143 CASE_FLT_FN (BUILT_IN_SCALBN
):
2144 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2145 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2147 /* Fall through... */
2148 CASE_FLT_FN (BUILT_IN_LDEXP
):
2149 builtin_optab
= ldexp_optab
; break;
2150 CASE_FLT_FN (BUILT_IN_FMOD
):
2151 builtin_optab
= fmod_optab
; break;
2152 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2153 CASE_FLT_FN (BUILT_IN_DREM
):
2154 builtin_optab
= remainder_optab
; break;
2159 /* Make a suitable register to place result in. */
2160 mode
= TYPE_MODE (TREE_TYPE (exp
));
2162 /* Before working hard, check whether the instruction is available. */
2163 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2166 target
= gen_reg_rtx (mode
);
2168 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2171 /* Always stabilize the argument list. */
2172 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2173 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2175 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2176 op1
= expand_normal (arg1
);
2180 /* Compute into TARGET.
2181 Set TARGET to wherever the result comes back. */
2182 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2183 target
, 0, OPTAB_DIRECT
);
2185 /* If we were unable to expand via the builtin, stop the sequence
2186 (without outputting the insns) and call to the library function
2187 with the stabilized argument list. */
2191 return expand_call (exp
, target
, target
== const0_rtx
);
2195 expand_errno_check (exp
, target
);
2197 /* Output the entire sequence. */
2198 insns
= get_insns ();
2205 /* Expand a call to the builtin sin and cos math functions.
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2213 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2215 optab builtin_optab
;
2217 tree fndecl
= get_callee_fndecl (exp
);
2218 enum machine_mode mode
;
2221 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2224 arg
= CALL_EXPR_ARG (exp
, 0);
2226 switch (DECL_FUNCTION_CODE (fndecl
))
2228 CASE_FLT_FN (BUILT_IN_SIN
):
2229 CASE_FLT_FN (BUILT_IN_COS
):
2230 builtin_optab
= sincos_optab
; break;
2235 /* Make a suitable register to place result in. */
2236 mode
= TYPE_MODE (TREE_TYPE (exp
));
2238 /* Check if sincos insn is available, otherwise fallback
2239 to sin or cos insn. */
2240 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2241 switch (DECL_FUNCTION_CODE (fndecl
))
2243 CASE_FLT_FN (BUILT_IN_SIN
):
2244 builtin_optab
= sin_optab
; break;
2245 CASE_FLT_FN (BUILT_IN_COS
):
2246 builtin_optab
= cos_optab
; break;
2251 /* Before working hard, check whether the instruction is available. */
2252 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2254 target
= gen_reg_rtx (mode
);
2256 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2257 need to expand the argument again. This way, we will not perform
2258 side-effects more the once. */
2259 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2261 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2265 /* Compute into TARGET.
2266 Set TARGET to wherever the result comes back. */
2267 if (builtin_optab
== sincos_optab
)
2271 switch (DECL_FUNCTION_CODE (fndecl
))
2273 CASE_FLT_FN (BUILT_IN_SIN
):
2274 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2276 CASE_FLT_FN (BUILT_IN_COS
):
2277 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2282 gcc_assert (result
);
2286 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2291 /* Output the entire sequence. */
2292 insns
= get_insns ();
2298 /* If we were unable to expand via the builtin, stop the sequence
2299 (without outputting the insns) and call to the library function
2300 with the stabilized argument list. */
2304 target
= expand_call (exp
, target
, target
== const0_rtx
);
2309 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2310 return an RTL instruction code that implements the functionality.
2311 If that isn't possible or available return CODE_FOR_nothing. */
2313 static enum insn_code
2314 interclass_mathfn_icode (tree arg
, tree fndecl
)
2316 bool errno_set
= false;
2317 optab builtin_optab
= 0;
2318 enum machine_mode mode
;
2320 switch (DECL_FUNCTION_CODE (fndecl
))
2322 CASE_FLT_FN (BUILT_IN_ILOGB
):
2323 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2324 CASE_FLT_FN (BUILT_IN_ISINF
):
2325 builtin_optab
= isinf_optab
; break;
2326 case BUILT_IN_ISNORMAL
:
2327 case BUILT_IN_ISFINITE
:
2328 CASE_FLT_FN (BUILT_IN_FINITE
):
2329 case BUILT_IN_FINITED32
:
2330 case BUILT_IN_FINITED64
:
2331 case BUILT_IN_FINITED128
:
2332 case BUILT_IN_ISINFD32
:
2333 case BUILT_IN_ISINFD64
:
2334 case BUILT_IN_ISINFD128
:
2335 /* These builtins have no optabs (yet). */
2341 /* There's no easy way to detect the case we need to set EDOM. */
2342 if (flag_errno_math
&& errno_set
)
2343 return CODE_FOR_nothing
;
2345 /* Optab mode depends on the mode of the input argument. */
2346 mode
= TYPE_MODE (TREE_TYPE (arg
));
2349 return optab_handler (builtin_optab
, mode
);
2350 return CODE_FOR_nothing
;
2353 /* Expand a call to one of the builtin math functions that operate on
2354 floating point argument and output an integer result (ilogb, isinf,
2356 Return 0 if a normal call should be emitted rather than expanding the
2357 function in-line. EXP is the expression that is a call to the builtin
2358 function; if convenient, the result should be placed in TARGET.
2359 SUBTARGET may be used as the target for computing one of EXP's operands. */
2362 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2364 enum insn_code icode
= CODE_FOR_nothing
;
2366 tree fndecl
= get_callee_fndecl (exp
);
2367 enum machine_mode mode
;
2370 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2373 arg
= CALL_EXPR_ARG (exp
, 0);
2374 icode
= interclass_mathfn_icode (arg
, fndecl
);
2375 mode
= TYPE_MODE (TREE_TYPE (arg
));
2377 if (icode
!= CODE_FOR_nothing
)
2379 rtx last
= get_last_insn ();
2380 tree orig_arg
= arg
;
2381 /* Make a suitable register to place result in. */
2383 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
))
2384 || !insn_data
[icode
].operand
[0].predicate (target
, GET_MODE (target
)))
2385 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2387 gcc_assert (insn_data
[icode
].operand
[0].predicate
2388 (target
, GET_MODE (target
)));
2390 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2391 need to expand the argument again. This way, we will not perform
2392 side-effects more the once. */
2393 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2395 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2397 if (mode
!= GET_MODE (op0
))
2398 op0
= convert_to_mode (mode
, op0
, 0);
2400 /* Compute into TARGET.
2401 Set TARGET to wherever the result comes back. */
2402 if (maybe_emit_unop_insn (icode
, target
, op0
, UNKNOWN
))
2404 delete_insns_since (last
);
2405 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2411 /* Expand a call to the builtin sincos math function.
2412 Return NULL_RTX if a normal call should be emitted rather than expanding the
2413 function in-line. EXP is the expression that is a call to the builtin
2417 expand_builtin_sincos (tree exp
)
2419 rtx op0
, op1
, op2
, target1
, target2
;
2420 enum machine_mode mode
;
2421 tree arg
, sinp
, cosp
;
2423 location_t loc
= EXPR_LOCATION (exp
);
2425 if (!validate_arglist (exp
, REAL_TYPE
,
2426 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2429 arg
= CALL_EXPR_ARG (exp
, 0);
2430 sinp
= CALL_EXPR_ARG (exp
, 1);
2431 cosp
= CALL_EXPR_ARG (exp
, 2);
2433 /* Make a suitable register to place result in. */
2434 mode
= TYPE_MODE (TREE_TYPE (arg
));
2436 /* Check if sincos insn is available, otherwise emit the call. */
2437 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2440 target1
= gen_reg_rtx (mode
);
2441 target2
= gen_reg_rtx (mode
);
2443 op0
= expand_normal (arg
);
2444 op1
= expand_normal (build_fold_indirect_ref_loc (loc
, sinp
));
2445 op2
= expand_normal (build_fold_indirect_ref_loc (loc
, cosp
));
2447 /* Compute into target1 and target2.
2448 Set TARGET to wherever the result comes back. */
2449 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2450 gcc_assert (result
);
2452 /* Move target1 and target2 to the memory locations indicated
2454 emit_move_insn (op1
, target1
);
2455 emit_move_insn (op2
, target2
);
2460 /* Expand a call to the internal cexpi builtin to the sincos math function.
2461 EXP is the expression that is a call to the builtin function; if convenient,
2462 the result should be placed in TARGET. SUBTARGET may be used as the target
2463 for computing one of EXP's operands. */
2466 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2468 tree fndecl
= get_callee_fndecl (exp
);
2470 enum machine_mode mode
;
2472 location_t loc
= EXPR_LOCATION (exp
);
2474 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2477 arg
= CALL_EXPR_ARG (exp
, 0);
2478 type
= TREE_TYPE (arg
);
2479 mode
= TYPE_MODE (TREE_TYPE (arg
));
2481 /* Try expanding via a sincos optab, fall back to emitting a libcall
2482 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2483 is only generated from sincos, cexp or if we have either of them. */
2484 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2486 op1
= gen_reg_rtx (mode
);
2487 op2
= gen_reg_rtx (mode
);
2489 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2491 /* Compute into op1 and op2. */
2492 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2494 else if (TARGET_HAS_SINCOS
)
2496 tree call
, fn
= NULL_TREE
;
2500 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2501 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2502 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2503 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2504 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2505 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2509 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2510 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2511 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2512 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2513 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2514 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2516 /* Make sure not to fold the sincos call again. */
2517 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2518 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2519 call
, 3, arg
, top1
, top2
));
2523 tree call
, fn
= NULL_TREE
, narg
;
2524 tree ctype
= build_complex_type (type
);
2526 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2527 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2528 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2529 fn
= built_in_decls
[BUILT_IN_CEXP
];
2530 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2531 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2535 /* If we don't have a decl for cexp create one. This is the
2536 friendliest fallback if the user calls __builtin_cexpi
2537 without full target C99 function support. */
2538 if (fn
== NULL_TREE
)
2541 const char *name
= NULL
;
2543 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2545 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2547 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2550 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2551 fn
= build_fn_decl (name
, fntype
);
2554 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2555 build_real (type
, dconst0
), arg
);
2557 /* Make sure not to fold the cexp call again. */
2558 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2559 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2560 target
, VOIDmode
, EXPAND_NORMAL
);
2563 /* Now build the proper return type. */
2564 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2565 make_tree (TREE_TYPE (arg
), op2
),
2566 make_tree (TREE_TYPE (arg
), op1
)),
2567 target
, VOIDmode
, EXPAND_NORMAL
);
2570 /* Conveniently construct a function call expression. FNDECL names the
2571 function to be called, N is the number of arguments, and the "..."
2572 parameters are the argument expressions. Unlike build_call_exr
2573 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2576 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2579 tree fntype
= TREE_TYPE (fndecl
);
2580 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2583 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2585 SET_EXPR_LOCATION (fn
, loc
);
2589 /* Expand a call to one of the builtin rounding functions gcc defines
2590 as an extension (lfloor and lceil). As these are gcc extensions we
2591 do not need to worry about setting errno to EDOM.
2592 If expanding via optab fails, lower expression to (int)(floor(x)).
2593 EXP is the expression that is a call to the builtin function;
2594 if convenient, the result should be placed in TARGET. */
2597 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2599 convert_optab builtin_optab
;
2600 rtx op0
, insns
, tmp
;
2601 tree fndecl
= get_callee_fndecl (exp
);
2602 enum built_in_function fallback_fn
;
2603 tree fallback_fndecl
;
2604 enum machine_mode mode
;
2607 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2610 arg
= CALL_EXPR_ARG (exp
, 0);
2612 switch (DECL_FUNCTION_CODE (fndecl
))
2614 CASE_FLT_FN (BUILT_IN_LCEIL
):
2615 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2616 builtin_optab
= lceil_optab
;
2617 fallback_fn
= BUILT_IN_CEIL
;
2620 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2621 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2622 builtin_optab
= lfloor_optab
;
2623 fallback_fn
= BUILT_IN_FLOOR
;
2630 /* Make a suitable register to place result in. */
2631 mode
= TYPE_MODE (TREE_TYPE (exp
));
2633 target
= gen_reg_rtx (mode
);
2635 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2636 need to expand the argument again. This way, we will not perform
2637 side-effects more the once. */
2638 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2640 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2644 /* Compute into TARGET. */
2645 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2647 /* Output the entire sequence. */
2648 insns
= get_insns ();
2654 /* If we were unable to expand via the builtin, stop the sequence
2655 (without outputting the insns). */
2658 /* Fall back to floating point rounding optab. */
2659 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2661 /* For non-C99 targets we may end up without a fallback fndecl here
2662 if the user called __builtin_lfloor directly. In this case emit
2663 a call to the floor/ceil variants nevertheless. This should result
2664 in the best user experience for not full C99 targets. */
2665 if (fallback_fndecl
== NULL_TREE
)
2668 const char *name
= NULL
;
2670 switch (DECL_FUNCTION_CODE (fndecl
))
2672 case BUILT_IN_LCEIL
:
2673 case BUILT_IN_LLCEIL
:
2676 case BUILT_IN_LCEILF
:
2677 case BUILT_IN_LLCEILF
:
2680 case BUILT_IN_LCEILL
:
2681 case BUILT_IN_LLCEILL
:
2684 case BUILT_IN_LFLOOR
:
2685 case BUILT_IN_LLFLOOR
:
2688 case BUILT_IN_LFLOORF
:
2689 case BUILT_IN_LLFLOORF
:
2692 case BUILT_IN_LFLOORL
:
2693 case BUILT_IN_LLFLOORL
:
2700 fntype
= build_function_type_list (TREE_TYPE (arg
),
2701 TREE_TYPE (arg
), NULL_TREE
);
2702 fallback_fndecl
= build_fn_decl (name
, fntype
);
2705 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2707 tmp
= expand_normal (exp
);
2709 /* Truncate the result of floating point optab to integer
2710 via expand_fix (). */
2711 target
= gen_reg_rtx (mode
);
2712 expand_fix (target
, tmp
, 0);
2717 /* Expand a call to one of the builtin math functions doing integer
2719 Return 0 if a normal call should be emitted rather than expanding the
2720 function in-line. EXP is the expression that is a call to the builtin
2721 function; if convenient, the result should be placed in TARGET. */
2724 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2726 convert_optab builtin_optab
;
2728 tree fndecl
= get_callee_fndecl (exp
);
2730 enum machine_mode mode
;
2732 /* There's no easy way to detect the case we need to set EDOM. */
2733 if (flag_errno_math
)
2736 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2739 arg
= CALL_EXPR_ARG (exp
, 0);
2741 switch (DECL_FUNCTION_CODE (fndecl
))
2743 CASE_FLT_FN (BUILT_IN_LRINT
):
2744 CASE_FLT_FN (BUILT_IN_LLRINT
):
2745 builtin_optab
= lrint_optab
; break;
2746 CASE_FLT_FN (BUILT_IN_LROUND
):
2747 CASE_FLT_FN (BUILT_IN_LLROUND
):
2748 builtin_optab
= lround_optab
; break;
2753 /* Make a suitable register to place result in. */
2754 mode
= TYPE_MODE (TREE_TYPE (exp
));
2756 target
= gen_reg_rtx (mode
);
2758 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2759 need to expand the argument again. This way, we will not perform
2760 side-effects more the once. */
2761 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2763 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2767 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2769 /* Output the entire sequence. */
2770 insns
= get_insns ();
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns) and call to the library function
2778 with the stabilized argument list. */
2781 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2859 /* Return the number of multiplications required to calculate
2860 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2861 subroutine of powi_cost. CACHE is an array indicating
2862 which exponents have already been calculated. */
2865 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2867 /* If we've already calculated this exponent, then this evaluation
2868 doesn't require any additional multiplications. */
2873 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2874 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2877 /* Return the number of multiplications required to calculate
2878 powi(x,n) for an arbitrary x, given the exponent N. This
2879 function needs to be kept in sync with expand_powi below. */
2882 powi_cost (HOST_WIDE_INT n
)
2884 bool cache
[POWI_TABLE_SIZE
];
2885 unsigned HOST_WIDE_INT digit
;
2886 unsigned HOST_WIDE_INT val
;
2892 /* Ignore the reciprocal when calculating the cost. */
2893 val
= (n
< 0) ? -n
: n
;
2895 /* Initialize the exponent cache. */
2896 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2901 while (val
>= POWI_TABLE_SIZE
)
2905 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2906 result
+= powi_lookup_cost (digit
, cache
)
2907 + POWI_WINDOW_SIZE
+ 1;
2908 val
>>= POWI_WINDOW_SIZE
;
2917 return result
+ powi_lookup_cost (val
, cache
);
2920 /* Recursive subroutine of expand_powi. This function takes the array,
2921 CACHE, of already calculated exponents and an exponent N and returns
2922 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2925 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2927 unsigned HOST_WIDE_INT digit
;
2931 if (n
< POWI_TABLE_SIZE
)
2936 target
= gen_reg_rtx (mode
);
2939 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2940 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
2944 target
= gen_reg_rtx (mode
);
2945 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2946 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2947 op1
= expand_powi_1 (mode
, digit
, cache
);
2951 target
= gen_reg_rtx (mode
);
2952 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2956 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2957 if (result
!= target
)
2958 emit_move_insn (target
, result
);
2962 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2963 floating point operand in mode MODE, and N is the exponent. This
2964 function needs to be kept in sync with powi_cost above. */
2967 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2969 rtx cache
[POWI_TABLE_SIZE
];
2973 return CONST1_RTX (mode
);
2975 memset (cache
, 0, sizeof (cache
));
2978 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2980 /* If the original exponent was negative, reciprocate the result. */
2982 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2983 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2988 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2989 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2990 if we can simplify it. */
2992 expand_builtin_pow_root (location_t loc
, tree arg0
, tree arg1
, tree type
,
2995 if (TREE_CODE (arg1
) == REAL_CST
2996 && !TREE_OVERFLOW (arg1
)
2997 && flag_unsafe_math_optimizations
)
2999 enum machine_mode mode
= TYPE_MODE (type
);
3000 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
3001 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
3002 REAL_VALUE_TYPE c
= TREE_REAL_CST (arg1
);
3003 tree op
= NULL_TREE
;
3007 /* Optimize pow (x, 0.5) into sqrt. */
3008 if (REAL_VALUES_EQUAL (c
, dconsthalf
))
3009 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3013 REAL_VALUE_TYPE dconst1_4
= dconst1
;
3014 REAL_VALUE_TYPE dconst3_4
;
3015 SET_REAL_EXP (&dconst1_4
, REAL_EXP (&dconst1_4
) - 2);
3017 real_from_integer (&dconst3_4
, VOIDmode
, 3, 0, 0);
3018 SET_REAL_EXP (&dconst3_4
, REAL_EXP (&dconst3_4
) - 2);
3020 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3021 machines that a builtin sqrt instruction is smaller than a
3022 call to pow with 0.25, so do this optimization even if
3024 if (REAL_VALUES_EQUAL (c
, dconst1_4
))
3026 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3027 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, op
);
3030 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3031 are optimizing for space. */
3032 else if (optimize_insn_for_speed_p ()
3033 && !TREE_SIDE_EFFECTS (arg0
)
3034 && REAL_VALUES_EQUAL (c
, dconst3_4
))
3036 tree sqrt1
= build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
3037 tree sqrt2
= builtin_save_expr (sqrt1
);
3038 tree sqrt3
= build_call_expr_loc (loc
, sqrtfn
, 1, sqrt1
);
3039 op
= fold_build2_loc (loc
, MULT_EXPR
, type
, sqrt2
, sqrt3
);
3044 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3045 cbrt/sqrts instead of pow (x, 1./6.). */
3047 && (tree_expr_nonnegative_p (arg0
) || !HONOR_NANS (mode
)))
3049 /* First try 1/3. */
3050 REAL_VALUE_TYPE dconst1_3
3051 = real_value_truncate (mode
, dconst_third ());
3053 if (REAL_VALUES_EQUAL (c
, dconst1_3
))
3054 op
= build_call_nofold_loc (loc
, cbrtfn
, 1, arg0
);
3057 else if (optimize_insn_for_speed_p ())
3059 REAL_VALUE_TYPE dconst1_6
= dconst1_3
;
3060 SET_REAL_EXP (&dconst1_6
, REAL_EXP (&dconst1_6
) - 1);
3062 if (REAL_VALUES_EQUAL (c
, dconst1_6
))
3064 op
= build_call_nofold_loc (loc
, sqrtfn
, 1, arg0
);
3065 op
= build_call_nofold_loc (loc
, cbrtfn
, 1, op
);
3071 return expand_expr (op
, subtarget
, mode
, EXPAND_NORMAL
);
3077 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3078 a normal call should be emitted rather than expanding the function
3079 in-line. EXP is the expression that is a call to the builtin
3080 function; if convenient, the result should be placed in TARGET. */
3083 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
3087 tree type
= TREE_TYPE (exp
);
3088 REAL_VALUE_TYPE cint
, c
, c2
;
3091 enum machine_mode mode
= TYPE_MODE (type
);
3093 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
3096 arg0
= CALL_EXPR_ARG (exp
, 0);
3097 arg1
= CALL_EXPR_ARG (exp
, 1);
3099 if (TREE_CODE (arg1
) != REAL_CST
3100 || TREE_OVERFLOW (arg1
))
3101 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3103 /* Handle constant exponents. */
3105 /* For integer valued exponents we can expand to an optimal multiplication
3106 sequence using expand_powi. */
3107 c
= TREE_REAL_CST (arg1
);
3108 n
= real_to_integer (&c
);
3109 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3110 if (real_identical (&c
, &cint
)
3111 && ((n
>= -1 && n
<= 2)
3112 || (flag_unsafe_math_optimizations
3113 && optimize_insn_for_speed_p ()
3114 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3116 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3119 op
= force_reg (mode
, op
);
3120 op
= expand_powi (op
, mode
, n
);
3125 narg0
= builtin_save_expr (arg0
);
3127 /* If the exponent is not integer valued, check if it is half of an integer.
3128 In this case we can expand to sqrt (x) * x**(n/2). */
3129 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
3130 if (fn
!= NULL_TREE
)
3132 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
3133 n
= real_to_integer (&c2
);
3134 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3135 if (real_identical (&c2
, &cint
)
3136 && ((flag_unsafe_math_optimizations
3137 && optimize_insn_for_speed_p ()
3138 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
3139 /* Even the c == 0.5 case cannot be done unconditionally
3140 when we need to preserve signed zeros, as
3141 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3142 || (!HONOR_SIGNED_ZEROS (mode
) && n
== 1)
3143 /* For c == 1.5 we can assume that x * sqrt (x) is always
3144 smaller than pow (x, 1.5) if sqrt will not be expanded
3147 && optab_handler (sqrt_optab
, mode
) != CODE_FOR_nothing
)))
3149 tree call_expr
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 1,
3151 /* Use expand_expr in case the newly built call expression
3152 was folded to a non-call. */
3153 op
= expand_expr (call_expr
, subtarget
, mode
, EXPAND_NORMAL
);
3156 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3157 op2
= force_reg (mode
, op2
);
3158 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
3159 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3160 0, OPTAB_LIB_WIDEN
);
3161 /* If the original exponent was negative, reciprocate the
3164 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3165 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3171 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3173 op
= expand_builtin_pow_root (EXPR_LOCATION (exp
), arg0
, arg1
, type
,
3178 /* Try if the exponent is a third of an integer. In this case
3179 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3180 different from pow (x, 1./3.) due to rounding and behavior
3181 with negative x we need to constrain this transformation to
3182 unsafe math and positive x or finite math. */
3183 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
3185 && flag_unsafe_math_optimizations
3186 && (tree_expr_nonnegative_p (arg0
)
3187 || !HONOR_NANS (mode
)))
3189 REAL_VALUE_TYPE dconst3
;
3190 real_from_integer (&dconst3
, VOIDmode
, 3, 0, 0);
3191 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
3192 real_round (&c2
, mode
, &c2
);
3193 n
= real_to_integer (&c2
);
3194 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3195 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
3196 real_convert (&c2
, mode
, &c2
);
3197 if (real_identical (&c2
, &c
)
3198 && ((optimize_insn_for_speed_p ()
3199 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
3202 tree call_expr
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 1,
3204 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
3205 if (abs (n
) % 3 == 2)
3206 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
3207 0, OPTAB_LIB_WIDEN
);
3210 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3211 op2
= force_reg (mode
, op2
);
3212 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
3213 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3214 0, OPTAB_LIB_WIDEN
);
3215 /* If the original exponent was negative, reciprocate the
3218 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3219 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3225 /* Fall back to optab expansion. */
3226 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3229 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3230 a normal call should be emitted rather than expanding the function
3231 in-line. EXP is the expression that is a call to the builtin
3232 function; if convenient, the result should be placed in TARGET. */
3235 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
3239 enum machine_mode mode
;
3240 enum machine_mode mode2
;
3242 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3245 arg0
= CALL_EXPR_ARG (exp
, 0);
3246 arg1
= CALL_EXPR_ARG (exp
, 1);
3247 mode
= TYPE_MODE (TREE_TYPE (exp
));
3249 /* Handle constant power. */
3251 if (TREE_CODE (arg1
) == INTEGER_CST
3252 && !TREE_OVERFLOW (arg1
))
3254 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3256 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3257 Otherwise, check the number of multiplications required. */
3258 if ((TREE_INT_CST_HIGH (arg1
) == 0
3259 || TREE_INT_CST_HIGH (arg1
) == -1)
3260 && ((n
>= -1 && n
<= 2)
3261 || (optimize_insn_for_speed_p ()
3262 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3264 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3265 op0
= force_reg (mode
, op0
);
3266 return expand_powi (op0
, mode
, n
);
3270 /* Emit a libcall to libgcc. */
3272 /* Mode of the 2nd argument must match that of an int. */
3273 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3275 if (target
== NULL_RTX
)
3276 target
= gen_reg_rtx (mode
);
3278 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3279 if (GET_MODE (op0
) != mode
)
3280 op0
= convert_to_mode (mode
, op0
, 0);
3281 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3282 if (GET_MODE (op1
) != mode2
)
3283 op1
= convert_to_mode (mode2
, op1
, 0);
3285 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3286 target
, LCT_CONST
, mode
, 2,
3287 op0
, mode
, op1
, mode2
);
3292 /* Expand expression EXP which is a call to the strlen builtin. Return
3293 NULL_RTX if we failed the caller should emit a normal call, otherwise
3294 try to get the result in TARGET, if convenient. */
3297 expand_builtin_strlen (tree exp
, rtx target
,
3298 enum machine_mode target_mode
)
3300 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3306 tree src
= CALL_EXPR_ARG (exp
, 0);
3307 rtx result
, src_reg
, char_rtx
, before_strlen
;
3308 enum machine_mode insn_mode
= target_mode
, char_mode
;
3309 enum insn_code icode
= CODE_FOR_nothing
;
3312 /* If the length can be computed at compile-time, return it. */
3313 len
= c_strlen (src
, 0);
3315 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3317 /* If the length can be computed at compile-time and is constant
3318 integer, but there are side-effects in src, evaluate
3319 src for side-effects, then return len.
3320 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3321 can be optimized into: i++; x = 3; */
3322 len
= c_strlen (src
, 1);
3323 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3325 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3326 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3329 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3331 /* If SRC is not a pointer type, don't do this operation inline. */
3335 /* Bail out if we can't compute strlen in the right mode. */
3336 while (insn_mode
!= VOIDmode
)
3338 icode
= optab_handler (strlen_optab
, insn_mode
);
3339 if (icode
!= CODE_FOR_nothing
)
3342 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3344 if (insn_mode
== VOIDmode
)
3347 /* Make a place to write the result of the instruction. */
3351 && GET_MODE (result
) == insn_mode
3352 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3353 result
= gen_reg_rtx (insn_mode
);
3355 /* Make a place to hold the source address. We will not expand
3356 the actual source until we are sure that the expansion will
3357 not fail -- there are trees that cannot be expanded twice. */
3358 src_reg
= gen_reg_rtx (Pmode
);
3360 /* Mark the beginning of the strlen sequence so we can emit the
3361 source operand later. */
3362 before_strlen
= get_last_insn ();
3364 char_rtx
= const0_rtx
;
3365 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3366 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3368 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3370 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3371 char_rtx
, GEN_INT (align
));
3376 /* Now that we are assured of success, expand the source. */
3378 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3380 emit_move_insn (src_reg
, pat
);
3385 emit_insn_after (pat
, before_strlen
);
3387 emit_insn_before (pat
, get_insns ());
3389 /* Return the value in the proper mode for this function. */
3390 if (GET_MODE (result
) == target_mode
)
3392 else if (target
!= 0)
3393 convert_move (target
, result
, 0);
3395 target
= convert_to_mode (target_mode
, result
, 0);
3401 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3402 bytes from constant string DATA + OFFSET and return it as target
3406 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3407 enum machine_mode mode
)
3409 const char *str
= (const char *) data
;
3411 gcc_assert (offset
>= 0
3412 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3413 <= strlen (str
) + 1));
3415 return c_readstr (str
+ offset
, mode
);
3418 /* Expand a call EXP to the memcpy builtin.
3419 Return NULL_RTX if we failed, the caller should emit a normal call,
3420 otherwise try to get the result in TARGET, if convenient (and in
3421 mode MODE if that's convenient). */
3424 expand_builtin_memcpy (tree exp
, rtx target
)
3426 if (!validate_arglist (exp
,
3427 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3431 tree dest
= CALL_EXPR_ARG (exp
, 0);
3432 tree src
= CALL_EXPR_ARG (exp
, 1);
3433 tree len
= CALL_EXPR_ARG (exp
, 2);
3434 const char *src_str
;
3435 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3436 unsigned int dest_align
3437 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3438 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3439 HOST_WIDE_INT expected_size
= -1;
3440 unsigned int expected_align
= 0;
3442 /* If DEST is not a pointer type, call the normal function. */
3443 if (dest_align
== 0)
3446 /* If either SRC is not a pointer type, don't do this
3447 operation in-line. */
3451 if (currently_expanding_gimple_stmt
)
3452 stringop_block_profile (currently_expanding_gimple_stmt
,
3453 &expected_align
, &expected_size
);
3455 if (expected_align
< dest_align
)
3456 expected_align
= dest_align
;
3457 dest_mem
= get_memory_rtx (dest
, len
);
3458 set_mem_align (dest_mem
, dest_align
);
3459 len_rtx
= expand_normal (len
);
3460 src_str
= c_getstr (src
);
3462 /* If SRC is a string constant and block move would be done
3463 by pieces, we can avoid loading the string from memory
3464 and only stored the computed constants. */
3466 && CONST_INT_P (len_rtx
)
3467 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3468 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3469 CONST_CAST (char *, src_str
),
3472 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3473 builtin_memcpy_read_str
,
3474 CONST_CAST (char *, src_str
),
3475 dest_align
, false, 0);
3476 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3477 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3481 src_mem
= get_memory_rtx (src
, len
);
3482 set_mem_align (src_mem
, src_align
);
3484 /* Copy word part most expediently. */
3485 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3486 CALL_EXPR_TAILCALL (exp
)
3487 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3488 expected_align
, expected_size
);
3492 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3493 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3499 /* Expand a call EXP to the mempcpy builtin.
3500 Return NULL_RTX if we failed; the caller should emit a normal call,
3501 otherwise try to get the result in TARGET, if convenient (and in
3502 mode MODE if that's convenient). If ENDP is 0 return the
3503 destination pointer, if ENDP is 1 return the end pointer ala
3504 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3508 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3510 if (!validate_arglist (exp
,
3511 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3515 tree dest
= CALL_EXPR_ARG (exp
, 0);
3516 tree src
= CALL_EXPR_ARG (exp
, 1);
3517 tree len
= CALL_EXPR_ARG (exp
, 2);
3518 return expand_builtin_mempcpy_args (dest
, src
, len
,
3519 target
, mode
, /*endp=*/ 1);
3523 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3524 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_mempcpy. */
3530 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3531 rtx target
, enum machine_mode mode
, int endp
)
3533 /* If return value is ignored, transform mempcpy into memcpy. */
3534 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_MEMCPY
])
3536 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3537 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3539 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3543 const char *src_str
;
3544 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3545 unsigned int dest_align
3546 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3547 rtx dest_mem
, src_mem
, len_rtx
;
3549 /* If either SRC or DEST is not a pointer type, don't do this
3550 operation in-line. */
3551 if (dest_align
== 0 || src_align
== 0)
3554 /* If LEN is not constant, call the normal function. */
3555 if (! host_integerp (len
, 1))
3558 len_rtx
= expand_normal (len
);
3559 src_str
= c_getstr (src
);
3561 /* If SRC is a string constant and block move would be done
3562 by pieces, we can avoid loading the string from memory
3563 and only stored the computed constants. */
3565 && CONST_INT_P (len_rtx
)
3566 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3567 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3568 CONST_CAST (char *, src_str
),
3571 dest_mem
= get_memory_rtx (dest
, len
);
3572 set_mem_align (dest_mem
, dest_align
);
3573 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3574 builtin_memcpy_read_str
,
3575 CONST_CAST (char *, src_str
),
3576 dest_align
, false, endp
);
3577 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3578 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3582 if (CONST_INT_P (len_rtx
)
3583 && can_move_by_pieces (INTVAL (len_rtx
),
3584 MIN (dest_align
, src_align
)))
3586 dest_mem
= get_memory_rtx (dest
, len
);
3587 set_mem_align (dest_mem
, dest_align
);
3588 src_mem
= get_memory_rtx (src
, len
);
3589 set_mem_align (src_mem
, src_align
);
3590 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3591 MIN (dest_align
, src_align
), endp
);
3592 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3593 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3602 # define HAVE_movstr 0
3603 # define CODE_FOR_movstr CODE_FOR_nothing
3606 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3607 we failed, the caller should emit a normal call, otherwise try to
3608 get the result in TARGET, if convenient. If ENDP is 0 return the
3609 destination pointer, if ENDP is 1 return the end pointer ala
3610 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3614 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3620 const struct insn_data_d
* data
;
3625 dest_mem
= get_memory_rtx (dest
, NULL
);
3626 src_mem
= get_memory_rtx (src
, NULL
);
3627 data
= insn_data
+ CODE_FOR_movstr
;
3630 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3631 dest_mem
= replace_equiv_address (dest_mem
, target
);
3632 end
= gen_reg_rtx (Pmode
);
3637 || target
== const0_rtx
3638 || ! (*data
->operand
[0].predicate
) (target
, Pmode
))
3640 end
= gen_reg_rtx (Pmode
);
3641 if (target
!= const0_rtx
)
3648 if (data
->operand
[0].mode
!= VOIDmode
)
3649 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3651 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3657 /* movstr is supposed to set end to the address of the NUL
3658 terminator. If the caller requested a mempcpy-like return value,
3660 if (endp
== 1 && target
!= const0_rtx
)
3662 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3663 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3669 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3670 NULL_RTX if we failed the caller should emit a normal call, otherwise
3671 try to get the result in TARGET, if convenient (and in mode MODE if that's
3675 expand_builtin_strcpy (tree exp
, rtx target
)
3677 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3679 tree dest
= CALL_EXPR_ARG (exp
, 0);
3680 tree src
= CALL_EXPR_ARG (exp
, 1);
3681 return expand_builtin_strcpy_args (dest
, src
, target
);
3686 /* Helper function to do the actual work for expand_builtin_strcpy. The
3687 arguments to the builtin_strcpy call DEST and SRC are broken out
3688 so that this can also be called without constructing an actual CALL_EXPR.
3689 The other arguments and return value are the same as for
3690 expand_builtin_strcpy. */
3693 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3695 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3698 /* Expand a call EXP to the stpcpy builtin.
3699 Return NULL_RTX if we failed the caller should emit a normal call,
3700 otherwise try to get the result in TARGET, if convenient (and in
3701 mode MODE if that's convenient). */
3704 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3707 location_t loc
= EXPR_LOCATION (exp
);
3709 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3712 dst
= CALL_EXPR_ARG (exp
, 0);
3713 src
= CALL_EXPR_ARG (exp
, 1);
3715 /* If return value is ignored, transform stpcpy into strcpy. */
3716 if (target
== const0_rtx
&& implicit_built_in_decls
[BUILT_IN_STRCPY
])
3718 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3719 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3720 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3727 /* Ensure we get an actual string whose length can be evaluated at
3728 compile-time, not an expression containing a string. This is
3729 because the latter will potentially produce pessimized code
3730 when used to produce the return value. */
3731 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3732 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3734 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3735 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3736 target
, mode
, /*endp=*/2);
3741 if (TREE_CODE (len
) == INTEGER_CST
)
3743 rtx len_rtx
= expand_normal (len
);
3745 if (CONST_INT_P (len_rtx
))
3747 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3753 if (mode
!= VOIDmode
)
3754 target
= gen_reg_rtx (mode
);
3756 target
= gen_reg_rtx (GET_MODE (ret
));
3758 if (GET_MODE (target
) != GET_MODE (ret
))
3759 ret
= gen_lowpart (GET_MODE (target
), ret
);
3761 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3762 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3770 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3774 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3779 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3780 enum machine_mode mode
)
3782 const char *str
= (const char *) data
;
3784 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3787 return c_readstr (str
+ offset
, mode
);
3790 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3791 NULL_RTX if we failed the caller should emit a normal call. */
3794 expand_builtin_strncpy (tree exp
, rtx target
)
3796 location_t loc
= EXPR_LOCATION (exp
);
3798 if (validate_arglist (exp
,
3799 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3801 tree dest
= CALL_EXPR_ARG (exp
, 0);
3802 tree src
= CALL_EXPR_ARG (exp
, 1);
3803 tree len
= CALL_EXPR_ARG (exp
, 2);
3804 tree slen
= c_strlen (src
, 1);
3806 /* We must be passed a constant len and src parameter. */
3807 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3810 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3812 /* We're required to pad with trailing zeros if the requested
3813 len is greater than strlen(s2)+1. In that case try to
3814 use store_by_pieces, if it fails, punt. */
3815 if (tree_int_cst_lt (slen
, len
))
3817 unsigned int dest_align
3818 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3819 const char *p
= c_getstr (src
);
3822 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3823 || !can_store_by_pieces (tree_low_cst (len
, 1),
3824 builtin_strncpy_read_str
,
3825 CONST_CAST (char *, p
),
3829 dest_mem
= get_memory_rtx (dest
, len
);
3830 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3831 builtin_strncpy_read_str
,
3832 CONST_CAST (char *, p
), dest_align
, false, 0);
3833 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3834 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3841 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3846 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3847 enum machine_mode mode
)
3849 const char *c
= (const char *) data
;
3850 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3852 memset (p
, *c
, GET_MODE_SIZE (mode
));
3854 return c_readstr (p
, mode
);
3857 /* Callback routine for store_by_pieces. Return the RTL of a register
3858 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3859 char value given in the RTL register data. For example, if mode is
3860 4 bytes wide, return the RTL for 0x01010101*data. */
3863 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3864 enum machine_mode mode
)
3870 size
= GET_MODE_SIZE (mode
);
3874 p
= XALLOCAVEC (char, size
);
3875 memset (p
, 1, size
);
3876 coeff
= c_readstr (p
, mode
);
3878 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3879 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3880 return force_reg (mode
, target
);
3883 /* Expand expression EXP, which is a call to the memset builtin. Return
3884 NULL_RTX if we failed the caller should emit a normal call, otherwise
3885 try to get the result in TARGET, if convenient (and in mode MODE if that's
3889 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3891 if (!validate_arglist (exp
,
3892 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3896 tree dest
= CALL_EXPR_ARG (exp
, 0);
3897 tree val
= CALL_EXPR_ARG (exp
, 1);
3898 tree len
= CALL_EXPR_ARG (exp
, 2);
3899 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3903 /* Helper function to do the actual work for expand_builtin_memset. The
3904 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3905 so that this can also be called without constructing an actual CALL_EXPR.
3906 The other arguments and return value are the same as for
3907 expand_builtin_memset. */
3910 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3911 rtx target
, enum machine_mode mode
, tree orig_exp
)
3914 enum built_in_function fcode
;
3916 unsigned int dest_align
;
3917 rtx dest_mem
, dest_addr
, len_rtx
;
3918 HOST_WIDE_INT expected_size
= -1;
3919 unsigned int expected_align
= 0;
3921 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3923 /* If DEST is not a pointer type, don't do this operation in-line. */
3924 if (dest_align
== 0)
3927 if (currently_expanding_gimple_stmt
)
3928 stringop_block_profile (currently_expanding_gimple_stmt
,
3929 &expected_align
, &expected_size
);
3931 if (expected_align
< dest_align
)
3932 expected_align
= dest_align
;
3934 /* If the LEN parameter is zero, return DEST. */
3935 if (integer_zerop (len
))
3937 /* Evaluate and ignore VAL in case it has side-effects. */
3938 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3939 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3942 /* Stabilize the arguments in case we fail. */
3943 dest
= builtin_save_expr (dest
);
3944 val
= builtin_save_expr (val
);
3945 len
= builtin_save_expr (len
);
3947 len_rtx
= expand_normal (len
);
3948 dest_mem
= get_memory_rtx (dest
, len
);
3950 if (TREE_CODE (val
) != INTEGER_CST
)
3954 val_rtx
= expand_normal (val
);
3955 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3958 /* Assume that we can memset by pieces if we can store
3959 * the coefficients by pieces (in the required modes).
3960 * We can't pass builtin_memset_gen_str as that emits RTL. */
3962 if (host_integerp (len
, 1)
3963 && can_store_by_pieces (tree_low_cst (len
, 1),
3964 builtin_memset_read_str
, &c
, dest_align
,
3967 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3969 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3970 builtin_memset_gen_str
, val_rtx
, dest_align
,
3973 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3974 dest_align
, expected_align
,
3978 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3979 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3983 if (target_char_cast (val
, &c
))
3988 if (host_integerp (len
, 1)
3989 && can_store_by_pieces (tree_low_cst (len
, 1),
3990 builtin_memset_read_str
, &c
, dest_align
,
3992 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3993 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3994 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3995 dest_align
, expected_align
,
3999 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4000 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4004 set_mem_align (dest_mem
, dest_align
);
4005 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4006 CALL_EXPR_TAILCALL (orig_exp
)
4007 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4008 expected_align
, expected_size
);
4012 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4013 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4019 fndecl
= get_callee_fndecl (orig_exp
);
4020 fcode
= DECL_FUNCTION_CODE (fndecl
);
4021 if (fcode
== BUILT_IN_MEMSET
)
4022 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4024 else if (fcode
== BUILT_IN_BZERO
)
4025 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4029 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4030 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4031 return expand_call (fn
, target
, target
== const0_rtx
);
4034 /* Expand expression EXP, which is a call to the bzero builtin. Return
4035 NULL_RTX if we failed the caller should emit a normal call. */
4038 expand_builtin_bzero (tree exp
)
4041 location_t loc
= EXPR_LOCATION (exp
);
4043 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4046 dest
= CALL_EXPR_ARG (exp
, 0);
4047 size
= CALL_EXPR_ARG (exp
, 1);
4049 /* New argument list transforming bzero(ptr x, int y) to
4050 memset(ptr x, int 0, size_t y). This is done this way
4051 so that if it isn't expanded inline, we fallback to
4052 calling bzero instead of memset. */
4054 return expand_builtin_memset_args (dest
, integer_zero_node
,
4055 fold_convert_loc (loc
, sizetype
, size
),
4056 const0_rtx
, VOIDmode
, exp
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, some conditions, fallback returns); text kept
   byte-identical, comments only added.  */
4059 /* Expand expression EXP, which is a call to the memcmp built-in function.
4060 Return NULL_RTX if we failed and the
4061 caller should emit a normal call, otherwise try to get the result in
4062 TARGET, if convenient (and in mode MODE, if that's convenient). */
4065 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4066 ATTRIBUTE_UNUSED
enum machine_mode mode
)
4068 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
/* Arguments must be (pointer, pointer, integer).  */
4070 if (!validate_arglist (exp
,
4071 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
/* Inline expansion is only attempted when the target provides a cmpmemsi
   or cmpstrnsi insn pattern.  */
4074 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4076 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4079 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4080 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4081 tree len
= CALL_EXPR_ARG (exp
, 2);
/* Byte alignment of the two source blocks, 0 if unknown.  */
4083 unsigned int arg1_align
4084 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4085 unsigned int arg2_align
4086 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4087 enum machine_mode insn_mode
;
/* Pick the result mode of whichever compare insn is available; cmpmemsi
   is preferred over cmpstrnsi.  */
4089 #ifdef HAVE_cmpmemsi
4091 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4094 #ifdef HAVE_cmpstrnsi
4096 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4101 /* If we don't have POINTER_TYPE, call the function. */
4102 if (arg1_align
== 0 || arg2_align
== 0)
4105 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo register of the right mode.  */
4108 && REG_P (result
) && GET_MODE (result
) == insn_mode
4109 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4110 result
= gen_reg_rtx (insn_mode
);
4112 arg1_rtx
= get_memory_rtx (arg1
, len
);
4113 arg2_rtx
= get_memory_rtx (arg2
, len
);
4114 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4116 /* Set MEM_SIZE as appropriate. */
4117 if (CONST_INT_P (arg3_rtx
))
4119 set_mem_size (arg1_rtx
, arg3_rtx
);
4120 set_mem_size (arg2_rtx
, arg3_rtx
);
/* Emit the comparison insn; the last operand is the shared minimum
   alignment of the two blocks.  */
4123 #ifdef HAVE_cmpmemsi
4125 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4126 GEN_INT (MIN (arg1_align
, arg2_align
)));
4129 #ifdef HAVE_cmpstrnsi
4131 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4132 GEN_INT (MIN (arg1_align
, arg2_align
)));
/* Fallback path: call the memcmp libfunc directly, passing the two
   addresses and the length in sizetype's mode.  */
4140 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4141 TYPE_MODE (integer_type_node
), 3,
4142 XEXP (arg1_rtx
, 0), Pmode
,
4143 XEXP (arg2_rtx
, 0), Pmode
,
4144 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4145 TYPE_UNSIGNED (sizetype
)),
4146 TYPE_MODE (sizetype
));
4148 /* Return the value in the proper mode for this function. */
4149 mode
= TYPE_MODE (TREE_TYPE (exp
));
4150 if (GET_MODE (result
) == mode
)
4152 else if (target
!= 0)
4154 convert_move (target
, result
, 0);
4158 return convert_to_mode (mode
, result
, 0);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, several if/else arms, declarations); text kept
   byte-identical, comments only added.  */
4165 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4166 if we failed the caller should emit a normal call, otherwise try to get
4167 the result in TARGET, if convenient. */
4170 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
/* Arguments must be (pointer, pointer).  */
4172 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4175 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
/* Only attempt inline expansion when the target implements at least one
   of the string-compare optabs.  */
4176 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
4177 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
4179 rtx arg1_rtx
, arg2_rtx
;
4180 rtx result
, insn
= NULL_RTX
;
4182 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4183 tree arg2
= CALL_EXPR_ARG (exp
, 1);
/* Byte alignment of the two strings, 0 if unknown.  */
4185 unsigned int arg1_align
4186 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4187 unsigned int arg2_align
4188 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4190 /* If we don't have POINTER_TYPE, call the function. */
4191 if (arg1_align
== 0 || arg2_align
== 0)
4194 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4195 arg1
= builtin_save_expr (arg1
);
4196 arg2
= builtin_save_expr (arg2
);
4198 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4199 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4201 #ifdef HAVE_cmpstrsi
4202 /* Try to call cmpstrsi. */
4205 enum machine_mode insn_mode
4206 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4208 /* Make a place to write the result of the instruction. */
4211 && REG_P (result
) && GET_MODE (result
) == insn_mode
4212 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4213 result
= gen_reg_rtx (insn_mode
);
4215 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4216 GEN_INT (MIN (arg1_align
, arg2_align
)));
4219 #ifdef HAVE_cmpstrnsi
4220 /* Try to determine at least one length and call cmpstrnsi. */
4221 if (!insn
&& HAVE_cmpstrnsi
)
4226 enum machine_mode insn_mode
4227 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
/* Compile-time string lengths, if computable (NULL_TREE otherwise).  */
4228 tree len1
= c_strlen (arg1
, 1);
4229 tree len2
= c_strlen (arg2
, 1);
/* +1 to cover the terminating NUL.  */
4232 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4234 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4236 /* If we don't have a constant length for the first, use the length
4237 of the second, if we know it. We don't require a constant for
4238 this case; some cost analysis could be done if both are available
4239 but neither is constant. For now, assume they're equally cheap,
4240 unless one has side effects. If both strings have constant lengths,
4247 else if (TREE_SIDE_EFFECTS (len1
))
4249 else if (TREE_SIDE_EFFECTS (len2
))
4251 else if (TREE_CODE (len1
) != INTEGER_CST
)
4253 else if (TREE_CODE (len2
) != INTEGER_CST
)
4255 else if (tree_int_cst_lt (len1
, len2
))
4260 /* If both arguments have side effects, we cannot optimize. */
4261 if (!len
|| TREE_SIDE_EFFECTS (len
))
4264 arg3_rtx
= expand_normal (len
);
4266 /* Make a place to write the result of the instruction. */
4269 && REG_P (result
) && GET_MODE (result
) == insn_mode
4270 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4271 result
= gen_reg_rtx (insn_mode
);
4273 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4274 GEN_INT (MIN (arg1_align
, arg2_align
)));
4280 enum machine_mode mode
;
4283 /* Return the value in the proper mode for this function. */
4284 mode
= TYPE_MODE (TREE_TYPE (exp
));
4285 if (GET_MODE (result
) == mode
)
4288 return convert_to_mode (mode
, result
, 0);
4289 convert_move (target
, result
, 0);
4293 /* Expand the library call ourselves using a stabilized argument
4294 list to avoid re-evaluating the function's arguments twice. */
4295 #ifdef HAVE_cmpstrnsi
4298 fndecl
= get_callee_fndecl (exp
);
/* Rebuild the strcmp call with the save_expr-stabilized arguments and
   preserve the original tail-call flag.  */
4299 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4300 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4301 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4302 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, several if/else arms, fallback returns); text
   kept byte-identical, comments only added.  */
4308 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4309 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4310 the result in TARGET, if convenient. */
4313 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4314 ATTRIBUTE_UNUSED
enum machine_mode mode
)
4316 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
/* Arguments must be (pointer, pointer, integer).  */
4318 if (!validate_arglist (exp
,
4319 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4322 /* If c_strlen can determine an expression for one of the string
4323 lengths, and it doesn't have side effects, then emit cmpstrnsi
4324 using length MIN(strlen(string)+1, arg3). */
4325 #ifdef HAVE_cmpstrnsi
4328 tree len
, len1
, len2
;
4329 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4332 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4333 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4334 tree arg3
= CALL_EXPR_ARG (exp
, 2);
/* Byte alignment of the two strings, 0 if unknown.  */
4336 unsigned int arg1_align
4337 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4338 unsigned int arg2_align
4339 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4340 enum machine_mode insn_mode
4341 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
/* Compile-time string lengths, if computable (NULL_TREE otherwise).  */
4343 len1
= c_strlen (arg1
, 1);
4344 len2
= c_strlen (arg2
, 1);
/* +1 to cover the terminating NUL.  */
4347 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4349 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4351 /* If we don't have a constant length for the first, use the length
4352 of the second, if we know it. We don't require a constant for
4353 this case; some cost analysis could be done if both are available
4354 but neither is constant. For now, assume they're equally cheap,
4355 unless one has side effects. If both strings have constant lengths,
4362 else if (TREE_SIDE_EFFECTS (len1
))
4364 else if (TREE_SIDE_EFFECTS (len2
))
4366 else if (TREE_CODE (len1
) != INTEGER_CST
)
4368 else if (TREE_CODE (len2
) != INTEGER_CST
)
4370 else if (tree_int_cst_lt (len1
, len2
))
4375 /* If both arguments have side effects, we cannot optimize. */
4376 if (!len
|| TREE_SIDE_EFFECTS (len
))
4379 /* The actual new length parameter is MIN(len,arg3). */
4380 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4381 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4383 /* If we don't have POINTER_TYPE, call the function. */
4384 if (arg1_align
== 0 || arg2_align
== 0)
4387 /* Make a place to write the result of the instruction. */
4390 && REG_P (result
) && GET_MODE (result
) == insn_mode
4391 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4392 result
= gen_reg_rtx (insn_mode
);
4394 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4395 arg1
= builtin_save_expr (arg1
);
4396 arg2
= builtin_save_expr (arg2
);
4397 len
= builtin_save_expr (len
);
4399 arg1_rtx
= get_memory_rtx (arg1
, len
);
4400 arg2_rtx
= get_memory_rtx (arg2
, len
);
4401 arg3_rtx
= expand_normal (len
);
4402 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4403 GEN_INT (MIN (arg1_align
, arg2_align
)));
4408 /* Return the value in the proper mode for this function. */
4409 mode
= TYPE_MODE (TREE_TYPE (exp
));
4410 if (GET_MODE (result
) == mode
)
4413 return convert_to_mode (mode
, result
, 0);
4414 convert_move (target
, result
, 0);
4418 /* Expand the library call ourselves using a stabilized argument
4419 list to avoid re-evaluating the function's arguments twice. */
4420 fndecl
= get_callee_fndecl (exp
);
/* Rebuild the strncmp call with the stabilized arguments and preserve
   the original tail-call flag.  */
4421 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4423 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4424 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4425 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (return type, braces, start_sequence call); text kept
   byte-identical, comments only added.  */
4431 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4432 if that's convenient. */
4435 expand_builtin_saveregs (void)
4439 /* Don't do __builtin_saveregs more than once in a function.
4440 Save the result of the first call and reuse it. */
4441 if (saveregs_value
!= 0)
4442 return saveregs_value
;
4444 /* When this function is called, it means that registers must be
4445 saved on entry to this function. So we migrate the call to the
4446 first insn of this function. */
4450 /* Do whatever the machine needs done in this case. */
/* Delegate the actual register-saving code to the target hook.  */
4451 val
= targetm
.calls
.expand_builtin_saveregs ();
/* Cache for subsequent calls in the same function (see check above).  */
4456 saveregs_value
= val
;
4458 /* Put the insns after the NOTE that starts the function. If this
4459 is inside a start_sequence, make the outer-level insn chain current, so
4460 the code is placed at the start of the function. */
4461 push_topmost_sequence ();
4462 emit_insn_after (seq
, entry_of_function ());
4463 pop_topmost_sequence ();
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (return type, braces); text kept byte-identical,
   comments only added.  */
4468 /* Expand a call to __builtin_next_arg. */
4471 expand_builtin_next_arg (void)
4473 /* Checking arguments is already done in fold_builtin_next_arg
4474 that must be called before this function. */
/* Address of the first anonymous argument: internal arg pointer plus the
   offset of the varargs area.  */
4475 return expand_binop (ptr_mode
, add_optab
,
4476 crtl
->args
.internal_arg_pointer
,
4477 crtl
->args
.arg_offset_rtx
,
4478 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, some returns, the else branch header); text
   kept byte-identical, comments only added.  */
4481 /* Make it easier for the backends by protecting the valist argument
4482 from multiple evaluations. */
4485 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4487 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4489 /* The current way of determining the type of valist is completely
4490 bogus. We should have the information on the va builtin instead. */
4492 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
/* Array-type va_list: pass around a pointer to its element type.  */
4494 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4496 if (TREE_SIDE_EFFECTS (valist
))
4497 valist
= save_expr (valist
);
4499 /* For this case, the backends will be expecting a pointer to
4500 vatype, but it's possible we've actually been given an array
4501 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4503 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4505 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4506 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
/* Non-array va_list: take its address when an lvalue is needed, then
   dereference through a MEM_REF.  */
4511 tree pt
= build_pointer_type (vatype
);
4515 if (! TREE_SIDE_EFFECTS (valist
))
4518 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4519 TREE_SIDE_EFFECTS (valist
) = 1;
4522 if (TREE_SIDE_EFFECTS (valist
))
4523 valist
= save_expr (valist
);
4524 valist
= fold_build2_loc (loc
, MEM_REF
,
4525 vatype
, valist
, build_int_cst (pt
, 0));
/* NOTE(review): lossy extract — return type and braces are missing from the
   visible text; kept byte-identical, comment only added.  */
4531 /* The "standard" definition of va_list is void*. */
4534 std_build_builtin_va_list (void)
4536 return ptr_type_node
;
/* NOTE(review): lossy extract — return type and braces are missing from the
   visible text; kept byte-identical, comment only added.  */
4539 /* The "standard" abi va_list is va_list_type_node. */
4542 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4544 return va_list_type_node
;
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (declarations of wtype/htype, braces, final return);
   text kept byte-identical, comments only added.  */
4547 /* The "standard" type of va_list is va_list_type_node. */
4550 std_canonical_va_list_type (tree type
)
/* Strip one level of indirection so TYPE names the va_list object itself
   rather than a pointer/reference to it.  */
4554 if (INDIRECT_REF_P (type
))
4555 type
= TREE_TYPE (type
);
4556 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4557 type
= TREE_TYPE (type
);
4558 wtype
= va_list_type_node
;
4560 /* Treat structure va_list types. */
4561 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4562 htype
= TREE_TYPE (htype
);
4563 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4565 /* If va_list is an array type, the argument may have decayed
4566 to a pointer type, e.g. by being passed to another function.
4567 In that case, unwrap both types so that we can compare the
4568 underlying records. */
4569 if (TREE_CODE (htype
) == ARRAY_TYPE
4570 || POINTER_TYPE_P (htype
))
4572 wtype
= TREE_TYPE (wtype
);
4573 htype
= TREE_TYPE (htype
);
/* Match against the canonical va_list by main variant, ignoring
   qualifiers.  */
4576 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4577 return va_list_type_node
;
/* NOTE(review): lossy extract — part of the leading comment, return type and
   braces are missing from the visible text; kept byte-identical, comments
   only added.  */
4582 /* The "standard" implementation of va_start: just assign `nextarg' to
4586 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
/* Expand VALIST as a writable lvalue and store NEXTARG into it.  */
4588 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4589 convert_move (va_r
, nextarg
, 0);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (declarations, braces, error returns, else keyword);
   text kept byte-identical, comments only added.  */
4592 /* Expand EXP, a call to __builtin_va_start. */
4595 expand_builtin_va_start (tree exp
)
4599 location_t loc
= EXPR_LOCATION (exp
);
/* va_start needs the va_list and the last named parameter.  */
4601 if (call_expr_nargs (exp
) < 2)
4603 error_at (loc
, "too few arguments to function %<va_start%>");
4607 if (fold_builtin_next_arg (exp
, true))
4610 nextarg
= expand_builtin_next_arg ();
4611 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
/* Prefer the target hook when provided; otherwise use the standard
   implementation.  */
4613 if (targetm
.expand_builtin_va_start
)
4614 targetm
.expand_builtin_va_start (valist
, nextarg
);
4616 std_expand_builtin_va_start (valist
, nextarg
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (gcc_unreachable under ARGS_GROW_DOWNWARD, braces,
   several assignments); text kept byte-identical, comments only added.  */
4621 /* The "standard" implementation of va_arg: read the value from the
4622 current (padded) address and increment by the (padded) size. */
4625 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4628 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4629 unsigned HOST_WIDE_INT align
, boundary
;
4632 #ifdef ARGS_GROW_DOWNWARD
4633 /* All of the alignment and movement below is for args-grow-up machines.
4634 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4635 implement their own specialized gimplify_va_arg_expr routines. */
/* Large/non-trivially-copyable types may be passed by reference; then we
   fetch a pointer and dereference at the end.  */
4639 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4641 type
= build_pointer_type (type
);
4643 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4644 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
);
4646 /* When we align parameter on stack for caller, if the parameter
4647 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4648 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4649 here with caller. */
4650 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4651 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4653 boundary
/= BITS_PER_UNIT
;
4655 /* Hoist the valist value into a temporary for the moment. */
4656 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4658 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4659 requires greater alignment, we must perform dynamic alignment. */
4660 if (boundary
> align
4661 && !integer_zerop (TYPE_SIZE (type
)))
/* Round the pointer up: tmp += boundary - 1; tmp &= -boundary.  */
4663 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4664 fold_build2 (POINTER_PLUS_EXPR
,
4666 valist_tmp
, size_int (boundary
- 1)));
4667 gimplify_and_add (t
, pre_p
);
4669 t
= fold_convert (sizetype
, valist_tmp
);
4670 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4671 fold_convert (TREE_TYPE (valist
),
4672 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4673 size_int (-boundary
))));
4674 gimplify_and_add (t
, pre_p
);
4679 /* If the actual alignment is less than the alignment of the type,
4680 adjust the type accordingly so that we don't assume strict alignment
4681 when dereferencing the pointer. */
4682 boundary
*= BITS_PER_UNIT
;
4683 if (boundary
< TYPE_ALIGN (type
))
4685 type
= build_variant_type_copy (type
);
4686 TYPE_ALIGN (type
) = boundary
;
4689 /* Compute the rounded size of the type. */
4690 type_size
= size_in_bytes (type
);
4691 rounded_size
= round_up (type_size
, align
);
4693 /* Reduce rounded_size so it's sharable with the postqueue. */
4694 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4698 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4700 /* Small args are padded downward. */
/* addr += rounded_size > align ? 0 : rounded_size - type_size.  */
4701 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
4702 rounded_size
, size_int (align
));
4703 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4704 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4705 addr
= fold_build2 (POINTER_PLUS_EXPR
,
4706 TREE_TYPE (addr
), addr
, t
);
4709 /* Compute new value for AP. */
4710 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4711 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4712 gimplify_and_add (t
, pre_p
);
4714 addr
= fold_convert (build_pointer_type (type
), addr
);
/* For pass-by-reference arguments, one extra dereference yields the
   actual object.  */
4717 addr
= build_va_arg_indirect_ref (addr
);
4719 return build_va_arg_indirect_ref (addr
);
/* NOTE(review): lossy extract — return type, braces, the mudflap body and
   the final return are missing from the visible text; kept byte-identical,
   comments only added.  */
4722 /* Build an indirect-ref expression over the given TREE, which represents a
4723 piece of a va_arg() expansion. */
4725 build_va_arg_indirect_ref (tree addr
)
4727 addr
= build_fold_indirect_ref_loc (EXPR_LOCATION (addr
), addr
);
4729 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
/* NOTE(review): lossy extract — part of the leading comment, return type and
   braces are missing from the visible text; kept byte-identical, comment
   only added.  */
4735 /* Return a dummy expression of type TYPE in order to keep going after an
/* Builds *(TYPE *)0 — a placeholder with the right mode for dead code.  */
4739 dummy_object (tree type
)
4741 tree t
= build_int_cst (build_pointer_type (type
), 0);
4742 return build1 (INDIRECT_REF
, type
, t
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, gave_help assignment, some returns/else arms);
   text kept byte-identical, comments only added.  */
4745 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4746 builtin function, but a very special sort of operator. */
4748 enum gimplify_status
4749 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4751 tree promoted_type
, have_va_type
;
4752 tree valist
= TREE_OPERAND (*expr_p
, 0);
4753 tree type
= TREE_TYPE (*expr_p
);
4755 location_t loc
= EXPR_LOCATION (*expr_p
);
4757 /* Verify that valist is of the proper type. */
4758 have_va_type
= TREE_TYPE (valist
);
4759 if (have_va_type
== error_mark_node
)
4761 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4763 if (have_va_type
== NULL_TREE
)
4765 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4769 /* Generate a diagnostic for requesting data of a type that cannot
4770 be passed through `...' due to type promotion at the call site. */
4771 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
/* Emit the one-time hint only once per compilation.  */
4774 static bool gave_help
;
4777 /* Unfortunately, this is merely undefined, rather than a constraint
4778 violation, so we cannot make this an error. If this call is never
4779 executed, the program is still strictly conforming. */
4780 warned
= warning_at (loc
, 0,
4781 "%qT is promoted to %qT when passed through %<...%>",
4782 type
, promoted_type
);
4783 if (!gave_help
&& warned
)
4786 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4787 promoted_type
, type
);
4790 /* We can, however, treat "undefined" any way we please.
4791 Call abort to encourage the user to fix the program. */
4793 inform (loc
, "if this code is reached, the program will abort");
4794 /* Before the abort, allow the evaluation of the va_list
4795 expression to exit or longjmp. */
4796 gimplify_and_add (valist
, pre_p
);
4797 t
= build_call_expr_loc (loc
,
4798 implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4799 gimplify_and_add (t
, pre_p
);
4801 /* This is dead code, but go ahead and finish so that the
4802 mode of the result comes out right. */
4803 *expr_p
= dummy_object (type
);
4808 /* Make it easier for the backends by protecting the valist argument
4809 from multiple evaluations. */
4810 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4812 /* For this case, the backends will be expecting a pointer to
4813 TREE_TYPE (abi), but it's possible we've
4814 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4816 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4818 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4819 valist
= fold_convert_loc (loc
, p1
,
4820 build_fold_addr_expr_loc (loc
, valist
));
4823 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4826 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4828 if (!targetm
.gimplify_va_arg_expr
)
4829 /* FIXME: Once most targets are converted we should merely
4830 assert this is non-null. */
/* Delegate the actual va_arg lowering to the target hook.  */
4833 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
/* NOTE(review): lossy extract — return type, braces and part of a comment are
   missing from the visible text; kept byte-identical, comments only added.  */
4838 /* Expand EXP, a call to __builtin_va_end. */
4841 expand_builtin_va_end (tree exp
)
4843 tree valist
= CALL_EXPR_ARG (exp
, 0);
4845 /* Evaluate for side effects, if needed. I hate macros that don't
/* va_end itself generates no code; only evaluate the operand's side
   effects, discarding the value.  */
4847 if (TREE_SIDE_EFFECTS (valist
))
4848 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (declarations, braces, the else keyword); text kept
   byte-identical, comments only added.  */
4853 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4854 builtin rather than just as an assignment in stdarg.h because of the
4855 nastiness of array-type va_list types. */
4858 expand_builtin_va_copy (tree exp
)
4861 location_t loc
= EXPR_LOCATION (exp
);
4863 dst
= CALL_EXPR_ARG (exp
, 0);
4864 src
= CALL_EXPR_ARG (exp
, 1);
/* Destination needs an lvalue (1); source does not (0).  */
4866 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4867 src
= stabilize_va_list_loc (loc
, src
, 0);
4869 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
/* Scalar va_list: a plain assignment suffices.  */
4871 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4873 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4874 TREE_SIDE_EFFECTS (t
) = 1;
4875 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* Array-type va_list: copy the whole object with a block move.  */
4879 rtx dstb
, srcb
, size
;
4881 /* Evaluate to pointers. */
4882 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4883 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4884 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4885 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4887 dstb
= convert_memory_address (Pmode
, dstb
);
4888 srcb
= convert_memory_address (Pmode
, srcb
);
4890 /* "Dereference" to BLKmode memories. */
4891 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4892 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4893 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4894 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4895 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4896 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4899 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, else keywords, returns); text kept
   byte-identical, comments only added.  */
4905 /* Expand a call to one of the builtin functions __builtin_frame_address or
4906 __builtin_return_address. */
4909 expand_builtin_frame_address (tree fndecl
, tree exp
)
4911 /* The argument must be a nonnegative integer constant.
4912 It counts the number of frames to scan up the stack.
4913 The value is the return address saved in that frame. */
4914 if (call_expr_nargs (exp
) == 0)
4915 /* Warning about missing arg was already issued. */
4917 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
/* Same check, different diagnostic text per builtin.  */
4919 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4920 error ("invalid argument to %<__builtin_frame_address%>");
4922 error ("invalid argument to %<__builtin_return_address%>");
4928 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4929 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4931 /* Some ports cannot access arbitrary stack frames. */
4934 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4935 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4937 warning (0, "unsupported argument to %<__builtin_return_address%>");
4941 /* For __builtin_frame_address, return what we've got. */
4942 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
/* Copy a non-constant address into a register before returning it.  */
4946 && ! CONSTANT_P (tem
))
4947 tem
= copy_to_mode_reg (Pmode
, tem
);
/* NOTE(review): lossy extract — return type, braces, early returns and the
   final return are missing from the visible text; kept byte-identical,
   comments only added.  */
4952 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4953 we failed and the caller should emit a normal call, otherwise try to get
4954 the result in TARGET, if convenient. */
4957 expand_builtin_alloca (tree exp
, rtx target
)
4962 /* Emit normal call if marked not-inlineable. */
4963 if (CALL_CANNOT_INLINE_P (exp
))
4966 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4969 /* Compute the argument. */
4970 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4972 /* Allocate the desired space. */
4973 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
/* Normalize the stack address to ptr_mode for the caller.  */
4974 result
= convert_memory_address (ptr_mode
, result
);
/* NOTE(review): lossy extract — return type, braces, some declarations and
   early returns are missing from the visible text; kept byte-identical,
   comments only added.  */
4979 /* Expand a call to a bswap builtin with argument ARG0. MODE
4980 is the mode to expand with. */
4983 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
4985 enum machine_mode mode
;
4989 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4992 arg
= CALL_EXPR_ARG (exp
, 0);
4993 mode
= TYPE_MODE (TREE_TYPE (arg
));
4994 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
/* Byte-swap via the bswap optab; the final 1 requests unsigned ops.  */
4996 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
4998 gcc_assert (target
);
5000 return convert_to_mode (mode
, target
, 0);
/* NOTE(review): lossy extract — return type, braces and early returns are
   missing from the visible text; kept byte-identical, comments only added.  */
5003 /* Expand a call to a unary builtin in EXP.
5004 Return NULL_RTX if a normal call should be emitted rather than expanding the
5005 function in-line. If convenient, the result should be placed in TARGET.
5006 SUBTARGET may be used as the target for computing one of EXP's operands. */
5009 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5010 rtx subtarget
, optab op_optab
)
5014 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5017 /* Compute the argument. */
5018 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5019 VOIDmode
, EXPAND_NORMAL
);
5020 /* Compute op, into TARGET if possible.
5021 Set TARGET to wherever the result comes back. */
5022 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5023 op_optab
, op0
, target
, 1);
5024 gcc_assert (target
);
5026 return convert_to_mode (target_mode
, target
, 0);
/* NOTE(review): lossy extract — return type, braces, early return and final
   return are missing from the visible text; kept byte-identical, comments
   only added.  */
5029 /* Expand a call to __builtin_expect. We just return our argument
5030 as the builtin_expect semantic should've been already executed by
5031 tree branch prediction pass. */
5034 expand_builtin_expect (tree exp
, rtx target
)
5038 if (call_expr_nargs (exp
) < 2)
5040 arg
= CALL_EXPR_ARG (exp
, 0)
;
5042 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5043 /* When guessing was done, the hints should be already stripped away. */
5044 gcc_assert (!flag_guess_branch_prob
5045 || optimize
== 0 || seen_error ());
/* NOTE(review): lossy extract — return type, braces and the HAVE_trap
   conditional are missing from the visible text; kept byte-identical,
   comments only added.  */
/* Emit a trap: either the target's trap insn or a call to abort.  */
5050 expand_builtin_trap (void)
5054 emit_insn (gen_trap ());
5057 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* NOTE(review): lossy extract — return type, braces and the function body
   (the barrier emission) are missing from the visible text; kept
   byte-identical, comment only added.  */
5061 /* Expand a call to __builtin_unreachable. We do nothing except emit
5062 a barrier saying that control flow will not pass here.
5064 It is the responsibility of the program being compiled to ensure
5065 that control flow does never reach __builtin_unreachable. */
5067 expand_builtin_unreachable (void)
/* NOTE(review): lossy extract — part of the leading comment, return type,
   braces and early returns are missing from the visible text; kept
   byte-identical, comments only added.  */
5072 /* Expand EXP, a call to fabs, fabsf or fabsl.
5073 Return NULL_RTX if a normal call should be emitted rather than expanding
5074 the function inline. If convenient, the result should be placed
5075 in TARGET. SUBTARGET may be used as the target for computing
5079 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5081 enum machine_mode mode
;
5085 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5088 arg
= CALL_EXPR_ARG (exp
, 0);
/* Stabilize the argument (and store it back into the call) so it is not
   evaluated twice by safe_from_p / expand_abs.  */
5089 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5090 mode
= TYPE_MODE (TREE_TYPE (arg
));
5091 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5092 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
/* NOTE(review): lossy extract — return type, braces and early returns are
   missing from the visible text; kept byte-identical, comments only added.  */
5095 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5096 Return NULL is a normal call should be emitted rather than expanding the
5097 function inline. If convenient, the result should be placed in TARGET.
5098 SUBTARGET may be used as the target for computing the operand. */
5101 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5106 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
/* op0 = magnitude, op1 = sign source.  */
5109 arg
= CALL_EXPR_ARG (exp
, 0);
5110 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5112 arg
= CALL_EXPR_ARG (exp
, 1);
5113 op1
= expand_normal (arg
);
5115 return expand_copysign (op0
, op1
, target
);
/* NOTE(review): lossy extract — return type, braces and the final return are
   missing from the visible text; kept byte-identical, comments only added.  */
5118 /* Create a new constant string literal and return a char* pointer to it.
5119 The STRING_CST value is the LEN characters at STR. */
5121 build_string_literal (int len
, const char *str
)
5123 tree t
, elem
, index
, type
;
/* Build a STRING_CST of type "const char[len]".  */
5125 t
= build_string (len
, str
);
5126 elem
= build_type_variant (char_type_node
, 1, 0);
5127 index
= build_index_type (size_int (len
- 1));
5128 type
= build_array_type (elem
, index
);
5129 TREE_TYPE (t
) = type
;
5130 TREE_CONSTANT (t
) = 1;
5131 TREE_READONLY (t
) = 1;
5132 TREE_STATIC (t
) = 1;
/* Take the address of element 0, yielding a "const char *".  */
5134 type
= build_pointer_type (elem
);
5135 t
= build1 (ADDR_EXPR
, type
,
5136 build4 (ARRAY_REF
, elem
,
5137 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
/* NOTE(review): lossy extract — return type, braces, the if/else around the
   which assignment and the tail of the libcall are missing from the visible
   text; kept byte-identical, comments only added.  */
5141 /* Expand a call to either the entry or exit function profiler. */
5144 expand_builtin_profile_func (bool exitp
)
5146 rtx this_rtx
, which
;
/* Address of the current function, taken from its DECL_RTL MEM.  */
5148 this_rtx
= DECL_RTL (current_function_decl
);
5149 gcc_assert (MEM_P (this_rtx
));
5150 this_rtx
= XEXP (this_rtx
, 0);
/* Select the exit or entry profiling libfunc per EXITP.  */
5153 which
= profile_function_exit_libfunc
;
5155 which
= profile_function_entry_libfunc
;
5157 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this_rtx
, Pmode
,
5158 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
/* NOTE(review): lossy extract — original source lines are missing between the
   numbered fragments (braces, #else/#endif structure, declarations, returns);
   text kept byte-identical, comments only added.  */
5165 /* Expand a call to __builtin___clear_cache. */
5168 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5170 #ifndef HAVE_clear_cache
5171 #ifdef CLEAR_INSN_CACHE
5172 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5173 does something. Just do the default expansion to a call to
5177 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5178 does nothing. There is no need to call it. Do nothing. */
5180 #endif /* CLEAR_INSN_CACHE */
5182 /* We have a "clear_cache" insn, and it will handle everything. */
5184 rtx begin_rtx
, end_rtx
;
5185 enum insn_code icode
;
5187 /* We must not expand to a library call. If we did, any
5188 fallback library function in libgcc that might contain a call to
5189 __builtin___clear_cache() would recurse infinitely. */
5190 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5192 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5196 if (HAVE_clear_cache
)
5198 icode
= CODE_FOR_clear_cache
;
/* Expand both pointer operands, coercing each into a form accepted by
   the insn's operand predicate.  */
5200 begin
= CALL_EXPR_ARG (exp
, 0);
5201 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5202 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5203 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5204 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5206 end
= CALL_EXPR_ARG (exp
, 1);
5207 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5208 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5209 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5210 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5212 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5215 #endif /* HAVE_clear_cache */
5218 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5221 round_trampoline_addr (rtx tramp
)
5223 rtx temp
, addend
, mask
;
5225 /* If we don't need too much alignment, we'll have been guaranteed
5226 proper alignment by get_trampoline_type. */
5227 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5230 /* Round address up to desired boundary. */
5231 temp
= gen_reg_rtx (Pmode
);
5232 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5233 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5235 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5236 temp
, 0, OPTAB_LIB_WIDEN
);
5237 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5238 temp
, 0, OPTAB_LIB_WIDEN
);
5244 expand_builtin_init_trampoline (tree exp
)
5246 tree t_tramp
, t_func
, t_chain
;
5247 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5249 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5250 POINTER_TYPE
, VOID_TYPE
))
5253 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5254 t_func
= CALL_EXPR_ARG (exp
, 1);
5255 t_chain
= CALL_EXPR_ARG (exp
, 2);
5257 r_tramp
= expand_normal (t_tramp
);
5258 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5259 MEM_NOTRAP_P (m_tramp
) = 1;
5261 /* The TRAMP argument should be the address of a field within the
5262 local function's FRAME decl. Let's see if we can fill in the
5263 to fill in the MEM_ATTRs for this memory. */
5264 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5265 set_mem_attributes_minus_bitpos (m_tramp
, TREE_OPERAND (t_tramp
, 0),
5268 tmp
= round_trampoline_addr (r_tramp
);
5271 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5272 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5273 set_mem_size (m_tramp
, GEN_INT (TRAMPOLINE_SIZE
));
5276 /* The FUNC argument should be the address of the nested function.
5277 Extract the actual function decl to pass to the hook. */
5278 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5279 t_func
= TREE_OPERAND (t_func
, 0);
5280 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5282 r_chain
= expand_normal (t_chain
);
5284 /* Generate insns to initialize the trampoline. */
5285 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5287 trampolines_created
= 1;
5289 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5290 "trampoline generated for nested function %qD", t_func
);
5296 expand_builtin_adjust_trampoline (tree exp
)
5300 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5303 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5304 tramp
= round_trampoline_addr (tramp
);
5305 if (targetm
.calls
.trampoline_adjust_address
)
5306 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5311 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5312 function. The function first checks whether the back end provides
5313 an insn to implement signbit for the respective mode. If not, it
5314 checks whether the floating point format of the value is such that
5315 the sign bit can be extracted. If that is not the case, the
5316 function returns NULL_RTX to indicate that a normal call should be
5317 emitted rather than expanding the function in-line. EXP is the
5318 expression that is a call to the builtin function; if convenient,
5319 the result should be placed in TARGET. */
5321 expand_builtin_signbit (tree exp
, rtx target
)
5323 const struct real_format
*fmt
;
5324 enum machine_mode fmode
, imode
, rmode
;
5327 enum insn_code icode
;
5329 location_t loc
= EXPR_LOCATION (exp
);
5331 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5334 arg
= CALL_EXPR_ARG (exp
, 0);
5335 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5336 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5337 fmt
= REAL_MODE_FORMAT (fmode
);
5339 arg
= builtin_save_expr (arg
);
5341 /* Expand the argument yielding a RTX expression. */
5342 temp
= expand_normal (arg
);
5344 /* Check if the back end provides an insn that handles signbit for the
5346 icode
= optab_handler (signbit_optab
, fmode
);
5347 if (icode
!= CODE_FOR_nothing
)
5349 rtx last
= get_last_insn ();
5350 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5351 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5353 delete_insns_since (last
);
5356 /* For floating point formats without a sign bit, implement signbit
5358 bitpos
= fmt
->signbit_ro
;
5361 /* But we can't do this if the format supports signed zero. */
5362 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5365 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5366 build_real (TREE_TYPE (arg
), dconst0
));
5367 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5370 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5372 imode
= int_mode_for_mode (fmode
);
5373 if (imode
== BLKmode
)
5375 temp
= gen_lowpart (imode
, temp
);
5380 /* Handle targets with different FP word orders. */
5381 if (FLOAT_WORDS_BIG_ENDIAN
)
5382 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5384 word
= bitpos
/ BITS_PER_WORD
;
5385 temp
= operand_subword_force (temp
, word
, fmode
);
5386 bitpos
= bitpos
% BITS_PER_WORD
;
5389 /* Force the intermediate word_mode (or narrower) result into a
5390 register. This avoids attempting to create paradoxical SUBREGs
5391 of floating point modes below. */
5392 temp
= force_reg (imode
, temp
);
5394 /* If the bitpos is within the "result mode" lowpart, the operation
5395 can be implement with a single bitwise AND. Otherwise, we need
5396 a right shift and an AND. */
5398 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5400 double_int mask
= double_int_setbit (double_int_zero
, bitpos
);
5402 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5403 temp
= gen_lowpart (rmode
, temp
);
5404 temp
= expand_binop (rmode
, and_optab
, temp
,
5405 immed_double_int_const (mask
, rmode
),
5406 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5410 /* Perform a logical right shift to place the signbit in the least
5411 significant bit, then truncate the result to the desired mode
5412 and mask just this bit. */
5413 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5414 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5415 temp
= gen_lowpart (rmode
, temp
);
5416 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5417 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5423 /* Expand fork or exec calls. TARGET is the desired target of the
5424 call. EXP is the call. FN is the
5425 identificator of the actual function. IGNORE is nonzero if the
5426 value is to be ignored. */
5429 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5434 /* If we are not profiling, just call the function. */
5435 if (!profile_arc_flag
)
5438 /* Otherwise call the wrapper. This should be equivalent for the rest of
5439 compiler, so the code does not diverge, and the wrapper may run the
5440 code necessary for keeping the profiling sane. */
5442 switch (DECL_FUNCTION_CODE (fn
))
5445 id
= get_identifier ("__gcov_fork");
5448 case BUILT_IN_EXECL
:
5449 id
= get_identifier ("__gcov_execl");
5452 case BUILT_IN_EXECV
:
5453 id
= get_identifier ("__gcov_execv");
5456 case BUILT_IN_EXECLP
:
5457 id
= get_identifier ("__gcov_execlp");
5460 case BUILT_IN_EXECLE
:
5461 id
= get_identifier ("__gcov_execle");
5464 case BUILT_IN_EXECVP
:
5465 id
= get_identifier ("__gcov_execvp");
5468 case BUILT_IN_EXECVE
:
5469 id
= get_identifier ("__gcov_execve");
5476 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5477 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5478 DECL_EXTERNAL (decl
) = 1;
5479 TREE_PUBLIC (decl
) = 1;
5480 DECL_ARTIFICIAL (decl
) = 1;
5481 TREE_NOTHROW (decl
) = 1;
5482 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5483 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5484 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5485 return expand_call (call
, target
, ignore
);
5490 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5491 the pointer in these functions is void*, the tree optimizers may remove
5492 casts. The mode computed in expand_builtin isn't reliable either, due
5493 to __sync_bool_compare_and_swap.
5495 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5496 group of builtins. This gives us log2 of the mode size. */
5498 static inline enum machine_mode
5499 get_builtin_sync_mode (int fcode_diff
)
5501 /* The size is not negotiable, so ask not to get BLKmode in return
5502 if the target indicates that a smaller size would be better. */
5503 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5506 /* Expand the memory expression LOC and return the appropriate memory operand
5507 for the builtin_sync operations. */
5510 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5514 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5515 addr
= convert_memory_address (Pmode
, addr
);
5517 /* Note that we explicitly do not want any alias information for this
5518 memory, so that we kill all other live memories. Otherwise we don't
5519 satisfy the full barrier semantics of the intrinsic. */
5520 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5522 /* The alignment needs to be at least according to that of the mode. */
5523 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5524 get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
)));
5525 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5526 MEM_VOLATILE_P (mem
) = 1;
5531 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5532 EXP is the CALL_EXPR. CODE is the rtx code
5533 that corresponds to the arithmetic or logical operation from the name;
5534 an exception here is that NOT actually means NAND. TARGET is an optional
5535 place for us to store the results; AFTER is true if this is the
5536 fetch_and_xxx form. IGNORE is true if we don't actually care about
5537 the result of the operation at all. */
5540 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5541 enum rtx_code code
, bool after
,
5542 rtx target
, bool ignore
)
5545 enum machine_mode old_mode
;
5546 location_t loc
= EXPR_LOCATION (exp
);
5548 if (code
== NOT
&& warn_sync_nand
)
5550 tree fndecl
= get_callee_fndecl (exp
);
5551 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5553 static bool warned_f_a_n
, warned_n_a_f
;
5557 case BUILT_IN_FETCH_AND_NAND_1
:
5558 case BUILT_IN_FETCH_AND_NAND_2
:
5559 case BUILT_IN_FETCH_AND_NAND_4
:
5560 case BUILT_IN_FETCH_AND_NAND_8
:
5561 case BUILT_IN_FETCH_AND_NAND_16
:
5566 fndecl
= implicit_built_in_decls
[BUILT_IN_FETCH_AND_NAND_N
];
5567 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5568 warned_f_a_n
= true;
5571 case BUILT_IN_NAND_AND_FETCH_1
:
5572 case BUILT_IN_NAND_AND_FETCH_2
:
5573 case BUILT_IN_NAND_AND_FETCH_4
:
5574 case BUILT_IN_NAND_AND_FETCH_8
:
5575 case BUILT_IN_NAND_AND_FETCH_16
:
5580 fndecl
= implicit_built_in_decls
[BUILT_IN_NAND_AND_FETCH_N
];
5581 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5582 warned_n_a_f
= true;
5590 /* Expand the operands. */
5591 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5593 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5594 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5595 of CONST_INTs, where we know the old_mode only from the call argument. */
5596 old_mode
= GET_MODE (val
);
5597 if (old_mode
== VOIDmode
)
5598 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5599 val
= convert_modes (mode
, old_mode
, val
, 1);
5602 return expand_sync_operation (mem
, val
, code
);
5604 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5607 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5608 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5609 true if this is the boolean form. TARGET is a place for us to store the
5610 results; this is NOT optional if IS_BOOL is true. */
5613 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5614 bool is_bool
, rtx target
)
5616 rtx old_val
, new_val
, mem
;
5617 enum machine_mode old_mode
;
5619 /* Expand the operands. */
5620 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5623 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
5624 mode
, EXPAND_NORMAL
);
5625 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5626 of CONST_INTs, where we know the old_mode only from the call argument. */
5627 old_mode
= GET_MODE (old_val
);
5628 if (old_mode
== VOIDmode
)
5629 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5630 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5632 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
5633 mode
, EXPAND_NORMAL
);
5634 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5635 of CONST_INTs, where we know the old_mode only from the call argument. */
5636 old_mode
= GET_MODE (new_val
);
5637 if (old_mode
== VOIDmode
)
5638 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5639 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5642 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5644 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
5647 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5648 general form is actually an atomic exchange, and some targets only
5649 support a reduced form with the second argument being a constant 1.
5650 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5654 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5658 enum machine_mode old_mode
;
5660 /* Expand the operands. */
5661 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5662 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5663 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5664 of CONST_INTs, where we know the old_mode only from the call argument. */
5665 old_mode
= GET_MODE (val
);
5666 if (old_mode
== VOIDmode
)
5667 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5668 val
= convert_modes (mode
, old_mode
, val
, 1);
5670 return expand_sync_lock_test_and_set (mem
, val
, target
);
5673 /* Expand the __sync_synchronize intrinsic. */
5676 expand_builtin_synchronize (void)
5679 VEC (tree
, gc
) *v_clobbers
;
5681 #ifdef HAVE_memory_barrier
5682 if (HAVE_memory_barrier
)
5684 emit_insn (gen_memory_barrier ());
5689 if (synchronize_libfunc
!= NULL_RTX
)
5691 emit_library_call (synchronize_libfunc
, LCT_NORMAL
, VOIDmode
, 0);
5695 /* If no explicit memory barrier instruction is available, create an
5696 empty asm stmt with a memory clobber. */
5697 v_clobbers
= VEC_alloc (tree
, gc
, 1);
5698 VEC_quick_push (tree
, v_clobbers
,
5699 tree_cons (NULL
, build_string (6, "memory"), NULL
));
5700 x
= gimple_build_asm_vec ("", NULL
, NULL
, v_clobbers
, NULL
);
5701 gimple_asm_set_volatile (x
, true);
5702 expand_asm_stmt (x
);
5705 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5708 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
5710 enum insn_code icode
;
5712 rtx val
= const0_rtx
;
5714 /* Expand the operands. */
5715 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5717 /* If there is an explicit operation in the md file, use it. */
5718 icode
= direct_optab_handler (sync_lock_release_optab
, mode
);
5719 if (icode
!= CODE_FOR_nothing
)
5721 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
5722 val
= force_reg (mode
, val
);
5724 insn
= GEN_FCN (icode
) (mem
, val
);
5732 /* Otherwise we can implement this operation by emitting a barrier
5733 followed by a store of zero. */
5734 expand_builtin_synchronize ();
5735 emit_move_insn (mem
, val
);
5738 /* Expand an expression EXP that calls a built-in function,
5739 with result going to TARGET if that's convenient
5740 (and in mode MODE if that's convenient).
5741 SUBTARGET may be used as the target for computing one of EXP's operands.
5742 IGNORE is nonzero if the value is to be ignored. */
5745 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5748 tree fndecl
= get_callee_fndecl (exp
);
5749 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5750 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5753 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5754 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5756 /* When not optimizing, generate calls to library functions for a certain
5759 && !called_as_built_in (fndecl
)
5760 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
5761 && fcode
!= BUILT_IN_ALLOCA
5762 && fcode
!= BUILT_IN_FREE
)
5763 return expand_call (exp
, target
, ignore
);
5765 /* The built-in function expanders test for target == const0_rtx
5766 to determine whether the function's result will be ignored. */
5768 target
= const0_rtx
;
5770 /* If the result of a pure or const built-in function is ignored, and
5771 none of its arguments are volatile, we can avoid expanding the
5772 built-in call and just evaluate the arguments for side-effects. */
5773 if (target
== const0_rtx
5774 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5775 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5777 bool volatilep
= false;
5779 call_expr_arg_iterator iter
;
5781 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5782 if (TREE_THIS_VOLATILE (arg
))
5790 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5791 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5798 CASE_FLT_FN (BUILT_IN_FABS
):
5799 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5804 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5805 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5810 /* Just do a normal library call if we were unable to fold
5812 CASE_FLT_FN (BUILT_IN_CABS
):
5815 CASE_FLT_FN (BUILT_IN_EXP
):
5816 CASE_FLT_FN (BUILT_IN_EXP10
):
5817 CASE_FLT_FN (BUILT_IN_POW10
):
5818 CASE_FLT_FN (BUILT_IN_EXP2
):
5819 CASE_FLT_FN (BUILT_IN_EXPM1
):
5820 CASE_FLT_FN (BUILT_IN_LOGB
):
5821 CASE_FLT_FN (BUILT_IN_LOG
):
5822 CASE_FLT_FN (BUILT_IN_LOG10
):
5823 CASE_FLT_FN (BUILT_IN_LOG2
):
5824 CASE_FLT_FN (BUILT_IN_LOG1P
):
5825 CASE_FLT_FN (BUILT_IN_TAN
):
5826 CASE_FLT_FN (BUILT_IN_ASIN
):
5827 CASE_FLT_FN (BUILT_IN_ACOS
):
5828 CASE_FLT_FN (BUILT_IN_ATAN
):
5829 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5830 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5831 because of possible accuracy problems. */
5832 if (! flag_unsafe_math_optimizations
)
5834 CASE_FLT_FN (BUILT_IN_SQRT
):
5835 CASE_FLT_FN (BUILT_IN_FLOOR
):
5836 CASE_FLT_FN (BUILT_IN_CEIL
):
5837 CASE_FLT_FN (BUILT_IN_TRUNC
):
5838 CASE_FLT_FN (BUILT_IN_ROUND
):
5839 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5840 CASE_FLT_FN (BUILT_IN_RINT
):
5841 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5846 CASE_FLT_FN (BUILT_IN_ILOGB
):
5847 if (! flag_unsafe_math_optimizations
)
5849 CASE_FLT_FN (BUILT_IN_ISINF
):
5850 CASE_FLT_FN (BUILT_IN_FINITE
):
5851 case BUILT_IN_ISFINITE
:
5852 case BUILT_IN_ISNORMAL
:
5853 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
5858 CASE_FLT_FN (BUILT_IN_LCEIL
):
5859 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5860 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5861 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5862 target
= expand_builtin_int_roundingfn (exp
, target
);
5867 CASE_FLT_FN (BUILT_IN_LRINT
):
5868 CASE_FLT_FN (BUILT_IN_LLRINT
):
5869 CASE_FLT_FN (BUILT_IN_LROUND
):
5870 CASE_FLT_FN (BUILT_IN_LLROUND
):
5871 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5876 CASE_FLT_FN (BUILT_IN_POW
):
5877 target
= expand_builtin_pow (exp
, target
, subtarget
);
5882 CASE_FLT_FN (BUILT_IN_POWI
):
5883 target
= expand_builtin_powi (exp
, target
, subtarget
);
5888 CASE_FLT_FN (BUILT_IN_ATAN2
):
5889 CASE_FLT_FN (BUILT_IN_LDEXP
):
5890 CASE_FLT_FN (BUILT_IN_SCALB
):
5891 CASE_FLT_FN (BUILT_IN_SCALBN
):
5892 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5893 if (! flag_unsafe_math_optimizations
)
5896 CASE_FLT_FN (BUILT_IN_FMOD
):
5897 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5898 CASE_FLT_FN (BUILT_IN_DREM
):
5899 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5904 CASE_FLT_FN (BUILT_IN_CEXPI
):
5905 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
5906 gcc_assert (target
);
5909 CASE_FLT_FN (BUILT_IN_SIN
):
5910 CASE_FLT_FN (BUILT_IN_COS
):
5911 if (! flag_unsafe_math_optimizations
)
5913 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5918 CASE_FLT_FN (BUILT_IN_SINCOS
):
5919 if (! flag_unsafe_math_optimizations
)
5921 target
= expand_builtin_sincos (exp
);
5926 case BUILT_IN_APPLY_ARGS
:
5927 return expand_builtin_apply_args ();
5929 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5930 FUNCTION with a copy of the parameters described by
5931 ARGUMENTS, and ARGSIZE. It returns a block of memory
5932 allocated on the stack into which is stored all the registers
5933 that might possibly be used for returning the result of a
5934 function. ARGUMENTS is the value returned by
5935 __builtin_apply_args. ARGSIZE is the number of bytes of
5936 arguments that must be copied. ??? How should this value be
5937 computed? We'll also need a safe worst case value for varargs
5939 case BUILT_IN_APPLY
:
5940 if (!validate_arglist (exp
, POINTER_TYPE
,
5941 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5942 && !validate_arglist (exp
, REFERENCE_TYPE
,
5943 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5949 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
5950 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
5951 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
5953 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
5956 /* __builtin_return (RESULT) causes the function to return the
5957 value described by RESULT. RESULT is address of the block of
5958 memory returned by __builtin_apply. */
5959 case BUILT_IN_RETURN
:
5960 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5961 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
5964 case BUILT_IN_SAVEREGS
:
5965 return expand_builtin_saveregs ();
5967 case BUILT_IN_VA_ARG_PACK
:
5968 /* All valid uses of __builtin_va_arg_pack () are removed during
5970 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
5973 case BUILT_IN_VA_ARG_PACK_LEN
:
5974 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5976 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
5979 /* Return the address of the first anonymous stack arg. */
5980 case BUILT_IN_NEXT_ARG
:
5981 if (fold_builtin_next_arg (exp
, false))
5983 return expand_builtin_next_arg ();
5985 case BUILT_IN_CLEAR_CACHE
:
5986 target
= expand_builtin___clear_cache (exp
);
5991 case BUILT_IN_CLASSIFY_TYPE
:
5992 return expand_builtin_classify_type (exp
);
5994 case BUILT_IN_CONSTANT_P
:
5997 case BUILT_IN_FRAME_ADDRESS
:
5998 case BUILT_IN_RETURN_ADDRESS
:
5999 return expand_builtin_frame_address (fndecl
, exp
);
6001 /* Returns the address of the area where the structure is returned.
6003 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6004 if (call_expr_nargs (exp
) != 0
6005 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6006 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6009 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6011 case BUILT_IN_ALLOCA
:
6012 target
= expand_builtin_alloca (exp
, target
);
6017 case BUILT_IN_STACK_SAVE
:
6018 return expand_stack_save ();
6020 case BUILT_IN_STACK_RESTORE
:
6021 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6024 case BUILT_IN_BSWAP32
:
6025 case BUILT_IN_BSWAP64
:
6026 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6032 CASE_INT_FN (BUILT_IN_FFS
):
6033 case BUILT_IN_FFSIMAX
:
6034 target
= expand_builtin_unop (target_mode
, exp
, target
,
6035 subtarget
, ffs_optab
);
6040 CASE_INT_FN (BUILT_IN_CLZ
):
6041 case BUILT_IN_CLZIMAX
:
6042 target
= expand_builtin_unop (target_mode
, exp
, target
,
6043 subtarget
, clz_optab
);
6048 CASE_INT_FN (BUILT_IN_CTZ
):
6049 case BUILT_IN_CTZIMAX
:
6050 target
= expand_builtin_unop (target_mode
, exp
, target
,
6051 subtarget
, ctz_optab
);
6056 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6057 case BUILT_IN_POPCOUNTIMAX
:
6058 target
= expand_builtin_unop (target_mode
, exp
, target
,
6059 subtarget
, popcount_optab
);
6064 CASE_INT_FN (BUILT_IN_PARITY
):
6065 case BUILT_IN_PARITYIMAX
:
6066 target
= expand_builtin_unop (target_mode
, exp
, target
,
6067 subtarget
, parity_optab
);
6072 case BUILT_IN_STRLEN
:
6073 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6078 case BUILT_IN_STRCPY
:
6079 target
= expand_builtin_strcpy (exp
, target
);
6084 case BUILT_IN_STRNCPY
:
6085 target
= expand_builtin_strncpy (exp
, target
);
6090 case BUILT_IN_STPCPY
:
6091 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6096 case BUILT_IN_MEMCPY
:
6097 target
= expand_builtin_memcpy (exp
, target
);
6102 case BUILT_IN_MEMPCPY
:
6103 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6108 case BUILT_IN_MEMSET
:
6109 target
= expand_builtin_memset (exp
, target
, mode
);
6114 case BUILT_IN_BZERO
:
6115 target
= expand_builtin_bzero (exp
);
6120 case BUILT_IN_STRCMP
:
6121 target
= expand_builtin_strcmp (exp
, target
);
6126 case BUILT_IN_STRNCMP
:
6127 target
= expand_builtin_strncmp (exp
, target
, mode
);
6133 case BUILT_IN_MEMCMP
:
6134 target
= expand_builtin_memcmp (exp
, target
, mode
);
6139 case BUILT_IN_SETJMP
:
6140 /* This should have been lowered to the builtins below. */
6143 case BUILT_IN_SETJMP_SETUP
:
6144 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6145 and the receiver label. */
6146 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6148 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6149 VOIDmode
, EXPAND_NORMAL
);
6150 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6151 rtx label_r
= label_rtx (label
);
6153 /* This is copied from the handling of non-local gotos. */
6154 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6155 nonlocal_goto_handler_labels
6156 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6157 nonlocal_goto_handler_labels
);
6158 /* ??? Do not let expand_label treat us as such since we would
6159 not want to be both on the list of non-local labels and on
6160 the list of forced labels. */
6161 FORCED_LABEL (label
) = 0;
6166 case BUILT_IN_SETJMP_DISPATCHER
:
6167 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6168 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6170 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6171 rtx label_r
= label_rtx (label
);
6173 /* Remove the dispatcher label from the list of non-local labels
6174 since the receiver labels have been added to it above. */
6175 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6180 case BUILT_IN_SETJMP_RECEIVER
:
6181 /* __builtin_setjmp_receiver is passed the receiver label. */
6182 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6184 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6185 rtx label_r
= label_rtx (label
);
6187 expand_builtin_setjmp_receiver (label_r
);
6192 /* __builtin_longjmp is passed a pointer to an array of five words.
6193 It's similar to the C library longjmp function but works with
6194 __builtin_setjmp above. */
6195 case BUILT_IN_LONGJMP
:
6196 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6198 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6199 VOIDmode
, EXPAND_NORMAL
);
6200 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6202 if (value
!= const1_rtx
)
6204 error ("%<__builtin_longjmp%> second argument must be 1");
6208 expand_builtin_longjmp (buf_addr
, value
);
6213 case BUILT_IN_NONLOCAL_GOTO
:
6214 target
= expand_builtin_nonlocal_goto (exp
);
6219 /* This updates the setjmp buffer that is its argument with the value
6220 of the current stack pointer. */
6221 case BUILT_IN_UPDATE_SETJMP_BUF
:
6222 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6225 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6227 expand_builtin_update_setjmp_buf (buf_addr
);
6233 expand_builtin_trap ();
6236 case BUILT_IN_UNREACHABLE
:
6237 expand_builtin_unreachable ();
6240 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6241 case BUILT_IN_SIGNBITD32
:
6242 case BUILT_IN_SIGNBITD64
:
6243 case BUILT_IN_SIGNBITD128
:
6244 target
= expand_builtin_signbit (exp
, target
);
6249 /* Various hooks for the DWARF 2 __throw routine. */
6250 case BUILT_IN_UNWIND_INIT
:
6251 expand_builtin_unwind_init ();
6253 case BUILT_IN_DWARF_CFA
:
6254 return virtual_cfa_rtx
;
6255 #ifdef DWARF2_UNWIND_INFO
6256 case BUILT_IN_DWARF_SP_COLUMN
:
6257 return expand_builtin_dwarf_sp_column ();
6258 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6259 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6262 case BUILT_IN_FROB_RETURN_ADDR
:
6263 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6264 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6265 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6266 case BUILT_IN_EH_RETURN
:
6267 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6268 CALL_EXPR_ARG (exp
, 1));
6270 #ifdef EH_RETURN_DATA_REGNO
6271 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6272 return expand_builtin_eh_return_data_regno (exp
);
6274 case BUILT_IN_EXTEND_POINTER
:
6275 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6276 case BUILT_IN_EH_POINTER
:
6277 return expand_builtin_eh_pointer (exp
);
6278 case BUILT_IN_EH_FILTER
:
6279 return expand_builtin_eh_filter (exp
);
6280 case BUILT_IN_EH_COPY_VALUES
:
6281 return expand_builtin_eh_copy_values (exp
);
6283 case BUILT_IN_VA_START
:
6284 return expand_builtin_va_start (exp
);
6285 case BUILT_IN_VA_END
:
6286 return expand_builtin_va_end (exp
);
6287 case BUILT_IN_VA_COPY
:
6288 return expand_builtin_va_copy (exp
);
6289 case BUILT_IN_EXPECT
:
6290 return expand_builtin_expect (exp
, target
);
6291 case BUILT_IN_PREFETCH
:
6292 expand_builtin_prefetch (exp
);
6295 case BUILT_IN_PROFILE_FUNC_ENTER
:
6296 return expand_builtin_profile_func (false);
6297 case BUILT_IN_PROFILE_FUNC_EXIT
:
6298 return expand_builtin_profile_func (true);
6300 case BUILT_IN_INIT_TRAMPOLINE
:
6301 return expand_builtin_init_trampoline (exp
);
6302 case BUILT_IN_ADJUST_TRAMPOLINE
:
6303 return expand_builtin_adjust_trampoline (exp
);
6306 case BUILT_IN_EXECL
:
6307 case BUILT_IN_EXECV
:
6308 case BUILT_IN_EXECLP
:
6309 case BUILT_IN_EXECLE
:
6310 case BUILT_IN_EXECVP
:
6311 case BUILT_IN_EXECVE
:
6312 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6317 case BUILT_IN_FETCH_AND_ADD_1
:
6318 case BUILT_IN_FETCH_AND_ADD_2
:
6319 case BUILT_IN_FETCH_AND_ADD_4
:
6320 case BUILT_IN_FETCH_AND_ADD_8
:
6321 case BUILT_IN_FETCH_AND_ADD_16
:
6322 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6323 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6324 false, target
, ignore
);
6329 case BUILT_IN_FETCH_AND_SUB_1
:
6330 case BUILT_IN_FETCH_AND_SUB_2
:
6331 case BUILT_IN_FETCH_AND_SUB_4
:
6332 case BUILT_IN_FETCH_AND_SUB_8
:
6333 case BUILT_IN_FETCH_AND_SUB_16
:
6334 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6335 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6336 false, target
, ignore
);
6341 case BUILT_IN_FETCH_AND_OR_1
:
6342 case BUILT_IN_FETCH_AND_OR_2
:
6343 case BUILT_IN_FETCH_AND_OR_4
:
6344 case BUILT_IN_FETCH_AND_OR_8
:
6345 case BUILT_IN_FETCH_AND_OR_16
:
6346 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6347 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6348 false, target
, ignore
);
6353 case BUILT_IN_FETCH_AND_AND_1
:
6354 case BUILT_IN_FETCH_AND_AND_2
:
6355 case BUILT_IN_FETCH_AND_AND_4
:
6356 case BUILT_IN_FETCH_AND_AND_8
:
6357 case BUILT_IN_FETCH_AND_AND_16
:
6358 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6359 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6360 false, target
, ignore
);
6365 case BUILT_IN_FETCH_AND_XOR_1
:
6366 case BUILT_IN_FETCH_AND_XOR_2
:
6367 case BUILT_IN_FETCH_AND_XOR_4
:
6368 case BUILT_IN_FETCH_AND_XOR_8
:
6369 case BUILT_IN_FETCH_AND_XOR_16
:
6370 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6371 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6372 false, target
, ignore
);
6377 case BUILT_IN_FETCH_AND_NAND_1
:
6378 case BUILT_IN_FETCH_AND_NAND_2
:
6379 case BUILT_IN_FETCH_AND_NAND_4
:
6380 case BUILT_IN_FETCH_AND_NAND_8
:
6381 case BUILT_IN_FETCH_AND_NAND_16
:
6382 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6383 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6384 false, target
, ignore
);
6389 case BUILT_IN_ADD_AND_FETCH_1
:
6390 case BUILT_IN_ADD_AND_FETCH_2
:
6391 case BUILT_IN_ADD_AND_FETCH_4
:
6392 case BUILT_IN_ADD_AND_FETCH_8
:
6393 case BUILT_IN_ADD_AND_FETCH_16
:
6394 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6395 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6396 true, target
, ignore
);
6401 case BUILT_IN_SUB_AND_FETCH_1
:
6402 case BUILT_IN_SUB_AND_FETCH_2
:
6403 case BUILT_IN_SUB_AND_FETCH_4
:
6404 case BUILT_IN_SUB_AND_FETCH_8
:
6405 case BUILT_IN_SUB_AND_FETCH_16
:
6406 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6407 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6408 true, target
, ignore
);
6413 case BUILT_IN_OR_AND_FETCH_1
:
6414 case BUILT_IN_OR_AND_FETCH_2
:
6415 case BUILT_IN_OR_AND_FETCH_4
:
6416 case BUILT_IN_OR_AND_FETCH_8
:
6417 case BUILT_IN_OR_AND_FETCH_16
:
6418 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6419 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6420 true, target
, ignore
);
6425 case BUILT_IN_AND_AND_FETCH_1
:
6426 case BUILT_IN_AND_AND_FETCH_2
:
6427 case BUILT_IN_AND_AND_FETCH_4
:
6428 case BUILT_IN_AND_AND_FETCH_8
:
6429 case BUILT_IN_AND_AND_FETCH_16
:
6430 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6431 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6432 true, target
, ignore
);
6437 case BUILT_IN_XOR_AND_FETCH_1
:
6438 case BUILT_IN_XOR_AND_FETCH_2
:
6439 case BUILT_IN_XOR_AND_FETCH_4
:
6440 case BUILT_IN_XOR_AND_FETCH_8
:
6441 case BUILT_IN_XOR_AND_FETCH_16
:
6442 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6443 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6444 true, target
, ignore
);
6449 case BUILT_IN_NAND_AND_FETCH_1
:
6450 case BUILT_IN_NAND_AND_FETCH_2
:
6451 case BUILT_IN_NAND_AND_FETCH_4
:
6452 case BUILT_IN_NAND_AND_FETCH_8
:
6453 case BUILT_IN_NAND_AND_FETCH_16
:
6454 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6455 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6456 true, target
, ignore
);
6461 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6462 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6463 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6464 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6465 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6466 if (mode
== VOIDmode
)
6467 mode
= TYPE_MODE (boolean_type_node
);
6468 if (!target
|| !register_operand (target
, mode
))
6469 target
= gen_reg_rtx (mode
);
6471 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6472 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6477 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6478 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6479 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6480 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6481 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6482 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6483 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6488 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6489 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6490 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6491 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6492 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6493 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6494 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6499 case BUILT_IN_LOCK_RELEASE_1
:
6500 case BUILT_IN_LOCK_RELEASE_2
:
6501 case BUILT_IN_LOCK_RELEASE_4
:
6502 case BUILT_IN_LOCK_RELEASE_8
:
6503 case BUILT_IN_LOCK_RELEASE_16
:
6504 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6505 expand_builtin_lock_release (mode
, exp
);
6508 case BUILT_IN_SYNCHRONIZE
:
6509 expand_builtin_synchronize ();
6512 case BUILT_IN_OBJECT_SIZE
:
6513 return expand_builtin_object_size (exp
);
6515 case BUILT_IN_MEMCPY_CHK
:
6516 case BUILT_IN_MEMPCPY_CHK
:
6517 case BUILT_IN_MEMMOVE_CHK
:
6518 case BUILT_IN_MEMSET_CHK
:
6519 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6524 case BUILT_IN_STRCPY_CHK
:
6525 case BUILT_IN_STPCPY_CHK
:
6526 case BUILT_IN_STRNCPY_CHK
:
6527 case BUILT_IN_STRCAT_CHK
:
6528 case BUILT_IN_STRNCAT_CHK
:
6529 case BUILT_IN_SNPRINTF_CHK
:
6530 case BUILT_IN_VSNPRINTF_CHK
:
6531 maybe_emit_chk_warning (exp
, fcode
);
6534 case BUILT_IN_SPRINTF_CHK
:
6535 case BUILT_IN_VSPRINTF_CHK
:
6536 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6540 maybe_emit_free_warning (exp
);
6543 default: /* just do library call, if unknown builtin */
6547 /* The switch statement above can drop through to cause the function
6548 to be called normally. */
6549 return expand_call (exp
, target
, ignore
);
6552 /* Determine whether a tree node represents a call to a built-in
6553 function. If the tree T is a call to a built-in function with
6554 the right number of arguments of the appropriate types, return
6555 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6556 Otherwise the return value is END_BUILTINS. */
6558 enum built_in_function
6559 builtin_mathfn_code (const_tree t
)
6561 const_tree fndecl
, arg
, parmlist
;
6562 const_tree argtype
, parmtype
;
6563 const_call_expr_arg_iterator iter
;
6565 if (TREE_CODE (t
) != CALL_EXPR
6566 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6567 return END_BUILTINS
;
6569 fndecl
= get_callee_fndecl (t
);
6570 if (fndecl
== NULL_TREE
6571 || TREE_CODE (fndecl
) != FUNCTION_DECL
6572 || ! DECL_BUILT_IN (fndecl
)
6573 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6574 return END_BUILTINS
;
6576 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6577 init_const_call_expr_arg_iterator (t
, &iter
);
6578 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6580 /* If a function doesn't take a variable number of arguments,
6581 the last element in the list will have type `void'. */
6582 parmtype
= TREE_VALUE (parmlist
);
6583 if (VOID_TYPE_P (parmtype
))
6585 if (more_const_call_expr_args_p (&iter
))
6586 return END_BUILTINS
;
6587 return DECL_FUNCTION_CODE (fndecl
);
6590 if (! more_const_call_expr_args_p (&iter
))
6591 return END_BUILTINS
;
6593 arg
= next_const_call_expr_arg (&iter
);
6594 argtype
= TREE_TYPE (arg
);
6596 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6598 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6599 return END_BUILTINS
;
6601 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6603 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6604 return END_BUILTINS
;
6606 else if (POINTER_TYPE_P (parmtype
))
6608 if (! POINTER_TYPE_P (argtype
))
6609 return END_BUILTINS
;
6611 else if (INTEGRAL_TYPE_P (parmtype
))
6613 if (! INTEGRAL_TYPE_P (argtype
))
6614 return END_BUILTINS
;
6617 return END_BUILTINS
;
6620 /* Variable-length argument list. */
6621 return DECL_FUNCTION_CODE (fndecl
);
6624 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6625 evaluate to a constant. */
6628 fold_builtin_constant_p (tree arg
)
6630 /* We return 1 for a numeric type that's known to be a constant
6631 value at compile-time or for an aggregate type that's a
6632 literal constant. */
6635 /* If we know this is a constant, emit the constant of one. */
6636 if (CONSTANT_CLASS_P (arg
)
6637 || (TREE_CODE (arg
) == CONSTRUCTOR
6638 && TREE_CONSTANT (arg
)))
6639 return integer_one_node
;
6640 if (TREE_CODE (arg
) == ADDR_EXPR
)
6642 tree op
= TREE_OPERAND (arg
, 0);
6643 if (TREE_CODE (op
) == STRING_CST
6644 || (TREE_CODE (op
) == ARRAY_REF
6645 && integer_zerop (TREE_OPERAND (op
, 1))
6646 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6647 return integer_one_node
;
6650 /* If this expression has side effects, show we don't know it to be a
6651 constant. Likewise if it's a pointer or aggregate type since in
6652 those case we only want literals, since those are only optimized
6653 when generating RTL, not later.
6654 And finally, if we are compiling an initializer, not code, we
6655 need to return a definite result now; there's not going to be any
6656 more optimization done. */
6657 if (TREE_SIDE_EFFECTS (arg
)
6658 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6659 || POINTER_TYPE_P (TREE_TYPE (arg
))
6661 || folding_initializer
)
6662 return integer_zero_node
;
6667 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6668 return it as a truthvalue. */
6671 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
6673 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
6675 fn
= built_in_decls
[BUILT_IN_EXPECT
];
6676 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
6677 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
6678 pred_type
= TREE_VALUE (arg_types
);
6679 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
6681 pred
= fold_convert_loc (loc
, pred_type
, pred
);
6682 expected
= fold_convert_loc (loc
, expected_type
, expected
);
6683 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
6685 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
6686 build_int_cst (ret_type
, 0));
6689 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6690 NULL_TREE if no simplification is possible. */
6693 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
6696 enum tree_code code
;
6698 /* If this is a builtin_expect within a builtin_expect keep the
6699 inner one. See through a comparison against a constant. It
6700 might have been added to create a thruthvalue. */
6702 if (COMPARISON_CLASS_P (inner
)
6703 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
6704 inner
= TREE_OPERAND (inner
, 0);
6706 if (TREE_CODE (inner
) == CALL_EXPR
6707 && (fndecl
= get_callee_fndecl (inner
))
6708 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
6709 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
6712 /* Distribute the expected value over short-circuiting operators.
6713 See through the cast from truthvalue_type_node to long. */
6715 while (TREE_CODE (inner
) == NOP_EXPR
6716 && INTEGRAL_TYPE_P (TREE_TYPE (inner
))
6717 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner
, 0))))
6718 inner
= TREE_OPERAND (inner
, 0);
6720 code
= TREE_CODE (inner
);
6721 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6723 tree op0
= TREE_OPERAND (inner
, 0);
6724 tree op1
= TREE_OPERAND (inner
, 1);
6726 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
6727 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
6728 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
6730 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
6733 /* If the argument isn't invariant then there's nothing else we can do. */
6734 if (!TREE_CONSTANT (arg0
))
6737 /* If we expect that a comparison against the argument will fold to
6738 a constant return the constant. In practice, this means a true
6739 constant or the address of a non-weak symbol. */
6742 if (TREE_CODE (inner
) == ADDR_EXPR
)
6746 inner
= TREE_OPERAND (inner
, 0);
6748 while (TREE_CODE (inner
) == COMPONENT_REF
6749 || TREE_CODE (inner
) == ARRAY_REF
);
6750 if ((TREE_CODE (inner
) == VAR_DECL
6751 || TREE_CODE (inner
) == FUNCTION_DECL
)
6752 && DECL_WEAK (inner
))
6756 /* Otherwise, ARG0 already has the proper type for the return value. */
6760 /* Fold a call to __builtin_classify_type with argument ARG. */
6763 fold_builtin_classify_type (tree arg
)
6766 return build_int_cst (NULL_TREE
, no_type_class
);
6768 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
6771 /* Fold a call to __builtin_strlen with argument ARG. */
6774 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
6776 if (!validate_arg (arg
, POINTER_TYPE
))
6780 tree len
= c_strlen (arg
, 0);
6783 return fold_convert_loc (loc
, type
, len
);
6789 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6792 fold_builtin_inf (location_t loc
, tree type
, int warn
)
6794 REAL_VALUE_TYPE real
;
6796 /* __builtin_inff is intended to be usable to define INFINITY on all
6797 targets. If an infinity is not available, INFINITY expands "to a
6798 positive constant of type float that overflows at translation
6799 time", footnote "In this case, using INFINITY will violate the
6800 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6801 Thus we pedwarn to ensure this constraint violation is
6803 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
6804 pedwarn (loc
, 0, "target format does not support infinity");
6807 return build_real (type
, real
);
6810 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6813 fold_builtin_nan (tree arg
, tree type
, int quiet
)
6815 REAL_VALUE_TYPE real
;
6818 if (!validate_arg (arg
, POINTER_TYPE
))
6820 str
= c_getstr (arg
);
6824 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
6827 return build_real (type
, real
);
6830 /* Return true if the floating point expression T has an integer value.
6831 We also allow +Inf, -Inf and NaN to be considered integer values. */
6834 integer_valued_real_p (tree t
)
6836 switch (TREE_CODE (t
))
6843 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6848 return integer_valued_real_p (TREE_OPERAND (t
, 1));
6855 return integer_valued_real_p (TREE_OPERAND (t
, 0))
6856 && integer_valued_real_p (TREE_OPERAND (t
, 1));
6859 return integer_valued_real_p (TREE_OPERAND (t
, 1))
6860 && integer_valued_real_p (TREE_OPERAND (t
, 2));
6863 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
6867 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
6868 if (TREE_CODE (type
) == INTEGER_TYPE
)
6870 if (TREE_CODE (type
) == REAL_TYPE
)
6871 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6876 switch (builtin_mathfn_code (t
))
6878 CASE_FLT_FN (BUILT_IN_CEIL
):
6879 CASE_FLT_FN (BUILT_IN_FLOOR
):
6880 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6881 CASE_FLT_FN (BUILT_IN_RINT
):
6882 CASE_FLT_FN (BUILT_IN_ROUND
):
6883 CASE_FLT_FN (BUILT_IN_TRUNC
):
6886 CASE_FLT_FN (BUILT_IN_FMIN
):
6887 CASE_FLT_FN (BUILT_IN_FMAX
):
6888 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
6889 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
6902 /* FNDECL is assumed to be a builtin where truncation can be propagated
6903 across (for instance floor((double)f) == (double)floorf (f).
6904 Do the transformation for a call with argument ARG. */
6907 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
6909 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6911 if (!validate_arg (arg
, REAL_TYPE
))
6914 /* Integer rounding functions are idempotent. */
6915 if (fcode
== builtin_mathfn_code (arg
))
6918 /* If argument is already integer valued, and we don't need to worry
6919 about setting errno, there's no need to perform rounding. */
6920 if (! flag_errno_math
&& integer_valued_real_p (arg
))
6925 tree arg0
= strip_float_extensions (arg
);
6926 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
6927 tree newtype
= TREE_TYPE (arg0
);
6930 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
6931 && (decl
= mathfn_built_in (newtype
, fcode
)))
6932 return fold_convert_loc (loc
, ftype
,
6933 build_call_expr_loc (loc
, decl
, 1,
6934 fold_convert_loc (loc
,
6941 /* FNDECL is assumed to be builtin which can narrow the FP type of
6942 the argument, for instance lround((double)f) -> lroundf (f).
6943 Do the transformation for a call with argument ARG. */
6946 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
6948 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6950 if (!validate_arg (arg
, REAL_TYPE
))
6953 /* If argument is already integer valued, and we don't need to worry
6954 about setting errno, there's no need to perform rounding. */
6955 if (! flag_errno_math
&& integer_valued_real_p (arg
))
6956 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
6957 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
6961 tree ftype
= TREE_TYPE (arg
);
6962 tree arg0
= strip_float_extensions (arg
);
6963 tree newtype
= TREE_TYPE (arg0
);
6966 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
6967 && (decl
= mathfn_built_in (newtype
, fcode
)))
6968 return build_call_expr_loc (loc
, decl
, 1,
6969 fold_convert_loc (loc
, newtype
, arg0
));
6972 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6973 sizeof (long long) == sizeof (long). */
6974 if (TYPE_PRECISION (long_long_integer_type_node
)
6975 == TYPE_PRECISION (long_integer_type_node
))
6977 tree newfn
= NULL_TREE
;
6980 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6981 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
6984 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6985 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
6988 CASE_FLT_FN (BUILT_IN_LLROUND
):
6989 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
6992 CASE_FLT_FN (BUILT_IN_LLRINT
):
6993 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7002 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7003 return fold_convert_loc (loc
,
7004 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7011 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7012 return type. Return NULL_TREE if no simplification can be made. */
7015 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7019 if (!validate_arg (arg
, COMPLEX_TYPE
)
7020 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7023 /* Calculate the result when the argument is a constant. */
7024 if (TREE_CODE (arg
) == COMPLEX_CST
7025 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7029 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7031 tree real
= TREE_OPERAND (arg
, 0);
7032 tree imag
= TREE_OPERAND (arg
, 1);
7034 /* If either part is zero, cabs is fabs of the other. */
7035 if (real_zerop (real
))
7036 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7037 if (real_zerop (imag
))
7038 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7040 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7041 if (flag_unsafe_math_optimizations
7042 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7044 const REAL_VALUE_TYPE sqrt2_trunc
7045 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7047 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7048 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7049 build_real (type
, sqrt2_trunc
));
7053 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7054 if (TREE_CODE (arg
) == NEGATE_EXPR
7055 || TREE_CODE (arg
) == CONJ_EXPR
)
7056 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7058 /* Don't do this when optimizing for size. */
7059 if (flag_unsafe_math_optimizations
7060 && optimize
&& optimize_function_for_speed_p (cfun
))
7062 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7064 if (sqrtfn
!= NULL_TREE
)
7066 tree rpart
, ipart
, result
;
7068 arg
= builtin_save_expr (arg
);
7070 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7071 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7073 rpart
= builtin_save_expr (rpart
);
7074 ipart
= builtin_save_expr (ipart
);
7076 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7077 fold_build2_loc (loc
, MULT_EXPR
, type
,
7079 fold_build2_loc (loc
, MULT_EXPR
, type
,
7082 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7089 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7090 complex tree type of the result. If NEG is true, the imaginary
7091 zero is negative. */
7094 build_complex_cproj (tree type
, bool neg
)
7096 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7100 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7101 build_real (TREE_TYPE (type
), rzero
));
7104 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7105 return type. Return NULL_TREE if no simplification can be made. */
7108 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7110 if (!validate_arg (arg
, COMPLEX_TYPE
)
7111 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7114 /* If there are no infinities, return arg. */
7115 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7116 return non_lvalue_loc (loc
, arg
);
7118 /* Calculate the result when the argument is a constant. */
7119 if (TREE_CODE (arg
) == COMPLEX_CST
)
7121 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7122 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7124 if (real_isinf (real
) || real_isinf (imag
))
7125 return build_complex_cproj (type
, imag
->sign
);
7129 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7131 tree real
= TREE_OPERAND (arg
, 0);
7132 tree imag
= TREE_OPERAND (arg
, 1);
7137 /* If the real part is inf and the imag part is known to be
7138 nonnegative, return (inf + 0i). Remember side-effects are
7139 possible in the imag part. */
7140 if (TREE_CODE (real
) == REAL_CST
7141 && real_isinf (TREE_REAL_CST_PTR (real
))
7142 && tree_expr_nonnegative_p (imag
))
7143 return omit_one_operand_loc (loc
, type
,
7144 build_complex_cproj (type
, false),
7147 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7148 Remember side-effects are possible in the real part. */
7149 if (TREE_CODE (imag
) == REAL_CST
7150 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7152 omit_one_operand_loc (loc
, type
,
7153 build_complex_cproj (type
, TREE_REAL_CST_PTR
7154 (imag
)->sign
), arg
);
7160 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7161 Return NULL_TREE if no simplification can be made. */
7164 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7167 enum built_in_function fcode
;
7170 if (!validate_arg (arg
, REAL_TYPE
))
7173 /* Calculate the result when the argument is a constant. */
7174 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7177 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7178 fcode
= builtin_mathfn_code (arg
);
7179 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7181 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7182 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7183 CALL_EXPR_ARG (arg
, 0),
7184 build_real (type
, dconsthalf
));
7185 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7188 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7189 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7191 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7195 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7197 /* The inner root was either sqrt or cbrt. */
7198 /* This was a conditional expression but it triggered a bug
7200 REAL_VALUE_TYPE dconstroot
;
7201 if (BUILTIN_SQRT_P (fcode
))
7202 dconstroot
= dconsthalf
;
7204 dconstroot
= dconst_third ();
7206 /* Adjust for the outer root. */
7207 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7208 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7209 tree_root
= build_real (type
, dconstroot
);
7210 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7214 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7215 if (flag_unsafe_math_optimizations
7216 && (fcode
== BUILT_IN_POW
7217 || fcode
== BUILT_IN_POWF
7218 || fcode
== BUILT_IN_POWL
))
7220 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7221 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7222 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7224 if (!tree_expr_nonnegative_p (arg0
))
7225 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7226 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7227 build_real (type
, dconsthalf
));
7228 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7234 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7235 Return NULL_TREE if no simplification can be made. */
7238 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7240 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7243 if (!validate_arg (arg
, REAL_TYPE
))
7246 /* Calculate the result when the argument is a constant. */
7247 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7250 if (flag_unsafe_math_optimizations
)
7252 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7253 if (BUILTIN_EXPONENT_P (fcode
))
7255 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7256 const REAL_VALUE_TYPE third_trunc
=
7257 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7258 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7259 CALL_EXPR_ARG (arg
, 0),
7260 build_real (type
, third_trunc
));
7261 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7264 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7265 if (BUILTIN_SQRT_P (fcode
))
7267 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7271 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7273 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7275 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7276 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7277 tree_root
= build_real (type
, dconstroot
);
7278 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7282 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7283 if (BUILTIN_CBRT_P (fcode
))
7285 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7286 if (tree_expr_nonnegative_p (arg0
))
7288 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7293 REAL_VALUE_TYPE dconstroot
;
7295 real_arithmetic (&dconstroot
, MULT_EXPR
,
7296 dconst_third_ptr (), dconst_third_ptr ());
7297 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7298 tree_root
= build_real (type
, dconstroot
);
7299 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7304 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7305 if (fcode
== BUILT_IN_POW
7306 || fcode
== BUILT_IN_POWF
7307 || fcode
== BUILT_IN_POWL
)
7309 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7310 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7311 if (tree_expr_nonnegative_p (arg00
))
7313 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7314 const REAL_VALUE_TYPE dconstroot
7315 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7316 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7317 build_real (type
, dconstroot
));
7318 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7325 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7326 TYPE is the type of the return value. Return NULL_TREE if no
7327 simplification can be made. */
7330 fold_builtin_cos (location_t loc
,
7331 tree arg
, tree type
, tree fndecl
)
7335 if (!validate_arg (arg
, REAL_TYPE
))
7338 /* Calculate the result when the argument is a constant. */
7339 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7342 /* Optimize cos(-x) into cos (x). */
7343 if ((narg
= fold_strip_sign_ops (arg
)))
7344 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7349 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7350 Return NULL_TREE if no simplification can be made. */
7353 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7355 if (validate_arg (arg
, REAL_TYPE
))
7359 /* Calculate the result when the argument is a constant. */
7360 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7363 /* Optimize cosh(-x) into cosh (x). */
7364 if ((narg
= fold_strip_sign_ops (arg
)))
7365 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7371 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7372 argument ARG. TYPE is the type of the return value. Return
7373 NULL_TREE if no simplification can be made. */
7376 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7379 if (validate_arg (arg
, COMPLEX_TYPE
)
7380 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7384 /* Calculate the result when the argument is a constant. */
7385 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7388 /* Optimize fn(-x) into fn(x). */
7389 if ((tmp
= fold_strip_sign_ops (arg
)))
7390 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7396 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7397 Return NULL_TREE if no simplification can be made. */
7400 fold_builtin_tan (tree arg
, tree type
)
7402 enum built_in_function fcode
;
7405 if (!validate_arg (arg
, REAL_TYPE
))
7408 /* Calculate the result when the argument is a constant. */
7409 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7412 /* Optimize tan(atan(x)) = x. */
7413 fcode
= builtin_mathfn_code (arg
);
7414 if (flag_unsafe_math_optimizations
7415 && (fcode
== BUILT_IN_ATAN
7416 || fcode
== BUILT_IN_ATANF
7417 || fcode
== BUILT_IN_ATANL
))
7418 return CALL_EXPR_ARG (arg
, 0);
7423 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7424 NULL_TREE if no simplification can be made. */
7427 fold_builtin_sincos (location_t loc
,
7428 tree arg0
, tree arg1
, tree arg2
)
7433 if (!validate_arg (arg0
, REAL_TYPE
)
7434 || !validate_arg (arg1
, POINTER_TYPE
)
7435 || !validate_arg (arg2
, POINTER_TYPE
))
7438 type
= TREE_TYPE (arg0
);
7440 /* Calculate the result when the argument is a constant. */
7441 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7444 /* Canonicalize sincos to cexpi. */
7445 if (!TARGET_C99_FUNCTIONS
)
7447 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7451 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7452 call
= builtin_save_expr (call
);
7454 return build2 (COMPOUND_EXPR
, void_type_node
,
7455 build2 (MODIFY_EXPR
, void_type_node
,
7456 build_fold_indirect_ref_loc (loc
, arg1
),
7457 build1 (IMAGPART_EXPR
, type
, call
)),
7458 build2 (MODIFY_EXPR
, void_type_node
,
7459 build_fold_indirect_ref_loc (loc
, arg2
),
7460 build1 (REALPART_EXPR
, type
, call
)));
7463 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7464 NULL_TREE if no simplification can be made. */
7467 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7470 tree realp
, imagp
, ifn
;
7473 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7474 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7477 /* Calculate the result when the argument is a constant. */
7478 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7481 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7483 /* In case we can figure out the real part of arg0 and it is constant zero
7485 if (!TARGET_C99_FUNCTIONS
)
7487 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7491 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7492 && real_zerop (realp
))
7494 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7495 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7498 /* In case we can easily decompose real and imaginary parts split cexp
7499 to exp (r) * cexpi (i). */
7500 if (flag_unsafe_math_optimizations
7503 tree rfn
, rcall
, icall
;
7505 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7509 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7513 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7514 icall
= builtin_save_expr (icall
);
7515 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7516 rcall
= builtin_save_expr (rcall
);
7517 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7518 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7520 fold_build1_loc (loc
, REALPART_EXPR
,
7522 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7524 fold_build1_loc (loc
, IMAGPART_EXPR
,
7531 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7532 Return NULL_TREE if no simplification can be made. */
7535 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7537 if (!validate_arg (arg
, REAL_TYPE
))
7540 /* Optimize trunc of constant value. */
7541 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7543 REAL_VALUE_TYPE r
, x
;
7544 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7546 x
= TREE_REAL_CST (arg
);
7547 real_trunc (&r
, TYPE_MODE (type
), &x
);
7548 return build_real (type
, r
);
7551 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7554 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7555 Return NULL_TREE if no simplification can be made. */
7558 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7560 if (!validate_arg (arg
, REAL_TYPE
))
7563 /* Optimize floor of constant value. */
7564 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7568 x
= TREE_REAL_CST (arg
);
7569 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7571 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7574 real_floor (&r
, TYPE_MODE (type
), &x
);
7575 return build_real (type
, r
);
7579 /* Fold floor (x) where x is nonnegative to trunc (x). */
7580 if (tree_expr_nonnegative_p (arg
))
7582 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7584 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7587 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7590 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7591 Return NULL_TREE if no simplification can be made. */
7594 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7596 if (!validate_arg (arg
, REAL_TYPE
))
7599 /* Optimize ceil of constant value. */
7600 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7604 x
= TREE_REAL_CST (arg
);
7605 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7607 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7610 real_ceil (&r
, TYPE_MODE (type
), &x
);
7611 return build_real (type
, r
);
7615 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7618 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7619 Return NULL_TREE if no simplification can be made. */
7622 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7624 if (!validate_arg (arg
, REAL_TYPE
))
7627 /* Optimize round of constant value. */
7628 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7632 x
= TREE_REAL_CST (arg
);
7633 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7635 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7638 real_round (&r
, TYPE_MODE (type
), &x
);
7639 return build_real (type
, r
);
7643 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7646 /* Fold function call to builtin lround, lroundf or lroundl (or the
7647 corresponding long long versions) and other rounding functions. ARG
7648 is the argument to the call. Return NULL_TREE if no simplification
7652 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7654 if (!validate_arg (arg
, REAL_TYPE
))
7657 /* Optimize lround of constant value. */
7658 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7660 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7662 if (real_isfinite (&x
))
7664 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7665 tree ftype
= TREE_TYPE (arg
);
7669 switch (DECL_FUNCTION_CODE (fndecl
))
7671 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7672 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7673 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7676 CASE_FLT_FN (BUILT_IN_LCEIL
):
7677 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7678 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7681 CASE_FLT_FN (BUILT_IN_LROUND
):
7682 CASE_FLT_FN (BUILT_IN_LLROUND
):
7683 real_round (&r
, TYPE_MODE (ftype
), &x
);
7690 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
7691 if (double_int_fits_to_tree_p (itype
, val
))
7692 return double_int_to_tree (itype
, val
);
7696 switch (DECL_FUNCTION_CODE (fndecl
))
7698 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7699 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7700 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7701 if (tree_expr_nonnegative_p (arg
))
7702 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7703 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7708 return fold_fixed_mathfn (loc
, fndecl
, arg
);
7711 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7712 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7713 the argument to the call. Return NULL_TREE if no simplification can
7717 fold_builtin_bitop (tree fndecl
, tree arg
)
7719 if (!validate_arg (arg
, INTEGER_TYPE
))
7722 /* Optimize for constant argument. */
7723 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7725 HOST_WIDE_INT hi
, width
, result
;
7726 unsigned HOST_WIDE_INT lo
;
7729 type
= TREE_TYPE (arg
);
7730 width
= TYPE_PRECISION (type
);
7731 lo
= TREE_INT_CST_LOW (arg
);
7733 /* Clear all the bits that are beyond the type's precision. */
7734 if (width
> HOST_BITS_PER_WIDE_INT
)
7736 hi
= TREE_INT_CST_HIGH (arg
);
7737 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7738 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7743 if (width
< HOST_BITS_PER_WIDE_INT
)
7744 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7747 switch (DECL_FUNCTION_CODE (fndecl
))
7749 CASE_INT_FN (BUILT_IN_FFS
):
7751 result
= ffs_hwi (lo
);
7753 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
7758 CASE_INT_FN (BUILT_IN_CLZ
):
7760 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7762 result
= width
- floor_log2 (lo
) - 1;
7763 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7767 CASE_INT_FN (BUILT_IN_CTZ
):
7769 result
= ctz_hwi (lo
);
7771 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
7772 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7776 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7779 result
++, lo
&= lo
- 1;
7781 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
7784 CASE_INT_FN (BUILT_IN_PARITY
):
7787 result
++, lo
&= lo
- 1;
7789 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
7797 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7803 /* Fold function call to builtin_bswap and the long and long long
7804 variants. Return NULL_TREE if no simplification can be made. */
7806 fold_builtin_bswap (tree fndecl
, tree arg
)
7808 if (! validate_arg (arg
, INTEGER_TYPE
))
7811 /* Optimize constant value. */
7812 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7814 HOST_WIDE_INT hi
, width
, r_hi
= 0;
7815 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
7818 type
= TREE_TYPE (arg
);
7819 width
= TYPE_PRECISION (type
);
7820 lo
= TREE_INT_CST_LOW (arg
);
7821 hi
= TREE_INT_CST_HIGH (arg
);
7823 switch (DECL_FUNCTION_CODE (fndecl
))
7825 case BUILT_IN_BSWAP32
:
7826 case BUILT_IN_BSWAP64
:
7830 for (s
= 0; s
< width
; s
+= 8)
7832 int d
= width
- s
- 8;
7833 unsigned HOST_WIDE_INT byte
;
7835 if (s
< HOST_BITS_PER_WIDE_INT
)
7836 byte
= (lo
>> s
) & 0xff;
7838 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
7840 if (d
< HOST_BITS_PER_WIDE_INT
)
7843 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
7853 if (width
< HOST_BITS_PER_WIDE_INT
)
7854 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
7856 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
7862 /* A subroutine of fold_builtin to fold the various logarithmic
7863 functions. Return NULL_TREE if no simplification can me made.
7864 FUNC is the corresponding MPFR logarithm function. */
7867 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
7868 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
7870 if (validate_arg (arg
, REAL_TYPE
))
7872 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7874 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7876 /* Calculate the result when the argument is a constant. */
7877 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
7880 /* Special case, optimize logN(expN(x)) = x. */
7881 if (flag_unsafe_math_optimizations
7882 && ((func
== mpfr_log
7883 && (fcode
== BUILT_IN_EXP
7884 || fcode
== BUILT_IN_EXPF
7885 || fcode
== BUILT_IN_EXPL
))
7886 || (func
== mpfr_log2
7887 && (fcode
== BUILT_IN_EXP2
7888 || fcode
== BUILT_IN_EXP2F
7889 || fcode
== BUILT_IN_EXP2L
))
7890 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
7891 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
7893 /* Optimize logN(func()) for various exponential functions. We
7894 want to determine the value "x" and the power "exponent" in
7895 order to transform logN(x**exponent) into exponent*logN(x). */
7896 if (flag_unsafe_math_optimizations
)
7898 tree exponent
= 0, x
= 0;
7902 CASE_FLT_FN (BUILT_IN_EXP
):
7903 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7904 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
7906 exponent
= CALL_EXPR_ARG (arg
, 0);
7908 CASE_FLT_FN (BUILT_IN_EXP2
):
7909 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7910 x
= build_real (type
, dconst2
);
7911 exponent
= CALL_EXPR_ARG (arg
, 0);
7913 CASE_FLT_FN (BUILT_IN_EXP10
):
7914 CASE_FLT_FN (BUILT_IN_POW10
):
7915 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7917 REAL_VALUE_TYPE dconst10
;
7918 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
7919 x
= build_real (type
, dconst10
);
7921 exponent
= CALL_EXPR_ARG (arg
, 0);
7923 CASE_FLT_FN (BUILT_IN_SQRT
):
7924 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7925 x
= CALL_EXPR_ARG (arg
, 0);
7926 exponent
= build_real (type
, dconsthalf
);
7928 CASE_FLT_FN (BUILT_IN_CBRT
):
7929 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7930 x
= CALL_EXPR_ARG (arg
, 0);
7931 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
7934 CASE_FLT_FN (BUILT_IN_POW
):
7935 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7936 x
= CALL_EXPR_ARG (arg
, 0);
7937 exponent
= CALL_EXPR_ARG (arg
, 1);
7943 /* Now perform the optimization. */
7946 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
7947 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
7955 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7956 NULL_TREE if no simplification can be made. */
7959 fold_builtin_hypot (location_t loc
, tree fndecl
,
7960 tree arg0
, tree arg1
, tree type
)
7962 tree res
, narg0
, narg1
;
7964 if (!validate_arg (arg0
, REAL_TYPE
)
7965 || !validate_arg (arg1
, REAL_TYPE
))
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
7972 /* If either argument to hypot has a negate or abs, strip that off.
7973 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7974 narg0
= fold_strip_sign_ops (arg0
);
7975 narg1
= fold_strip_sign_ops (arg1
);
7978 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
7979 narg1
? narg1
: arg1
);
7982 /* If either argument is zero, hypot is fabs of the other. */
7983 if (real_zerop (arg0
))
7984 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
7985 else if (real_zerop (arg1
))
7986 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
7988 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7989 if (flag_unsafe_math_optimizations
7990 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
7992 const REAL_VALUE_TYPE sqrt2_trunc
7993 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7994 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7995 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
7996 build_real (type
, sqrt2_trunc
));
8003 /* Fold a builtin function call to pow, powf, or powl. Return
8004 NULL_TREE if no simplification can be made. */
8006 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8010 if (!validate_arg (arg0
, REAL_TYPE
)
8011 || !validate_arg (arg1
, REAL_TYPE
))
8014 /* Calculate the result when the argument is a constant. */
8015 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8018 /* Optimize pow(1.0,y) = 1.0. */
8019 if (real_onep (arg0
))
8020 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8022 if (TREE_CODE (arg1
) == REAL_CST
8023 && !TREE_OVERFLOW (arg1
))
8025 REAL_VALUE_TYPE cint
;
8029 c
= TREE_REAL_CST (arg1
);
8031 /* Optimize pow(x,0.0) = 1.0. */
8032 if (REAL_VALUES_EQUAL (c
, dconst0
))
8033 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8036 /* Optimize pow(x,1.0) = x. */
8037 if (REAL_VALUES_EQUAL (c
, dconst1
))
8040 /* Optimize pow(x,-1.0) = 1.0/x. */
8041 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8042 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8043 build_real (type
, dconst1
), arg0
);
8045 /* Optimize pow(x,0.5) = sqrt(x). */
8046 if (flag_unsafe_math_optimizations
8047 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8049 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8051 if (sqrtfn
!= NULL_TREE
)
8052 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8055 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8056 if (flag_unsafe_math_optimizations
)
8058 const REAL_VALUE_TYPE dconstroot
8059 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8061 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8063 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8064 if (cbrtfn
!= NULL_TREE
)
8065 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8069 /* Check for an integer exponent. */
8070 n
= real_to_integer (&c
);
8071 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8072 if (real_identical (&c
, &cint
))
8074 /* Attempt to evaluate pow at compile-time, unless this should
8075 raise an exception. */
8076 if (TREE_CODE (arg0
) == REAL_CST
8077 && !TREE_OVERFLOW (arg0
)
8079 || (!flag_trapping_math
&& !flag_errno_math
)
8080 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8085 x
= TREE_REAL_CST (arg0
);
8086 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8087 if (flag_unsafe_math_optimizations
|| !inexact
)
8088 return build_real (type
, x
);
8091 /* Strip sign ops from even integer powers. */
8092 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8094 tree narg0
= fold_strip_sign_ops (arg0
);
8096 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8101 if (flag_unsafe_math_optimizations
)
8103 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8105 /* Optimize pow(expN(x),y) = expN(x*y). */
8106 if (BUILTIN_EXPONENT_P (fcode
))
8108 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8109 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8110 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8111 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8114 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8115 if (BUILTIN_SQRT_P (fcode
))
8117 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8118 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8119 build_real (type
, dconsthalf
));
8120 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8123 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8124 if (BUILTIN_CBRT_P (fcode
))
8126 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8127 if (tree_expr_nonnegative_p (arg
))
8129 const REAL_VALUE_TYPE dconstroot
8130 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8131 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8132 build_real (type
, dconstroot
));
8133 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8137 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8138 if (fcode
== BUILT_IN_POW
8139 || fcode
== BUILT_IN_POWF
8140 || fcode
== BUILT_IN_POWL
)
8142 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8143 if (tree_expr_nonnegative_p (arg00
))
8145 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8146 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8147 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8155 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8156 Return NULL_TREE if no simplification can be made. */
8158 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8159 tree arg0
, tree arg1
, tree type
)
8161 if (!validate_arg (arg0
, REAL_TYPE
)
8162 || !validate_arg (arg1
, INTEGER_TYPE
))
8165 /* Optimize pow(1.0,y) = 1.0. */
8166 if (real_onep (arg0
))
8167 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8169 if (host_integerp (arg1
, 0))
8171 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8173 /* Evaluate powi at compile-time. */
8174 if (TREE_CODE (arg0
) == REAL_CST
8175 && !TREE_OVERFLOW (arg0
))
8178 x
= TREE_REAL_CST (arg0
);
8179 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8180 return build_real (type
, x
);
8183 /* Optimize pow(x,0) = 1.0. */
8185 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8188 /* Optimize pow(x,1) = x. */
8192 /* Optimize pow(x,-1) = 1.0/x. */
8194 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8195 build_real (type
, dconst1
), arg0
);
8201 /* A subroutine of fold_builtin to fold the various exponent
8202 functions. Return NULL_TREE if no simplification can be made.
8203 FUNC is the corresponding MPFR exponent function. */
8206 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8207 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8209 if (validate_arg (arg
, REAL_TYPE
))
8211 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8214 /* Calculate the result when the argument is a constant. */
8215 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8218 /* Optimize expN(logN(x)) = x. */
8219 if (flag_unsafe_math_optimizations
)
8221 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8223 if ((func
== mpfr_exp
8224 && (fcode
== BUILT_IN_LOG
8225 || fcode
== BUILT_IN_LOGF
8226 || fcode
== BUILT_IN_LOGL
))
8227 || (func
== mpfr_exp2
8228 && (fcode
== BUILT_IN_LOG2
8229 || fcode
== BUILT_IN_LOG2F
8230 || fcode
== BUILT_IN_LOG2L
))
8231 || (func
== mpfr_exp10
8232 && (fcode
== BUILT_IN_LOG10
8233 || fcode
== BUILT_IN_LOG10F
8234 || fcode
== BUILT_IN_LOG10L
)))
8235 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8242 /* Return true if VAR is a VAR_DECL or a component thereof. */
8245 var_decl_component_p (tree var
)
8248 while (handled_component_p (inner
))
8249 inner
= TREE_OPERAND (inner
, 0);
8250 return SSA_VAR_P (inner
);
8253 /* Fold function call to builtin memset. Return
8254 NULL_TREE if no simplification can be made. */
8257 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8258 tree type
, bool ignore
)
8260 tree var
, ret
, etype
;
8261 unsigned HOST_WIDE_INT length
, cval
;
8263 if (! validate_arg (dest
, POINTER_TYPE
)
8264 || ! validate_arg (c
, INTEGER_TYPE
)
8265 || ! validate_arg (len
, INTEGER_TYPE
))
8268 if (! host_integerp (len
, 1))
8271 /* If the LEN parameter is zero, return DEST. */
8272 if (integer_zerop (len
))
8273 return omit_one_operand_loc (loc
, type
, dest
, c
);
8275 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8280 if (TREE_CODE (var
) != ADDR_EXPR
)
8283 var
= TREE_OPERAND (var
, 0);
8284 if (TREE_THIS_VOLATILE (var
))
8287 etype
= TREE_TYPE (var
);
8288 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8289 etype
= TREE_TYPE (etype
);
8291 if (!INTEGRAL_TYPE_P (etype
)
8292 && !POINTER_TYPE_P (etype
))
8295 if (! var_decl_component_p (var
))
8298 length
= tree_low_cst (len
, 1);
8299 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8300 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8304 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8307 if (integer_zerop (c
))
8311 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8314 cval
= tree_low_cst (c
, 1);
8318 cval
|= (cval
<< 31) << 1;
8321 ret
= build_int_cst_type (etype
, cval
);
8322 var
= build_fold_indirect_ref_loc (loc
,
8323 fold_convert_loc (loc
,
8324 build_pointer_type (etype
),
8326 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8330 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8333 /* Fold function call to builtin memset. Return
8334 NULL_TREE if no simplification can be made. */
8337 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8339 if (! validate_arg (dest
, POINTER_TYPE
)
8340 || ! validate_arg (size
, INTEGER_TYPE
))
8346 /* New argument list transforming bzero(ptr x, int y) to
8347 memset(ptr x, int 0, size_t y). This is done this way
8348 so that if it isn't expanded inline, we fallback to
8349 calling bzero instead of memset. */
8351 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8352 fold_convert_loc (loc
, sizetype
, size
),
8353 void_type_node
, ignore
);
8356 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8357 NULL_TREE if no simplification can be made.
8358 If ENDP is 0, return DEST (like memcpy).
8359 If ENDP is 1, return DEST+LEN (like mempcpy).
8360 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8361 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8365 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8366 tree len
, tree type
, bool ignore
, int endp
)
8368 tree destvar
, srcvar
, expr
;
8370 if (! validate_arg (dest
, POINTER_TYPE
)
8371 || ! validate_arg (src
, POINTER_TYPE
)
8372 || ! validate_arg (len
, INTEGER_TYPE
))
8375 /* If the LEN parameter is zero, return DEST. */
8376 if (integer_zerop (len
))
8377 return omit_one_operand_loc (loc
, type
, dest
, src
);
8379 /* If SRC and DEST are the same (and not volatile), return
8380 DEST{,+LEN,+LEN-1}. */
8381 if (operand_equal_p (src
, dest
, 0))
8385 tree srctype
, desttype
;
8386 unsigned int src_align
, dest_align
;
8391 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8392 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8394 /* Both DEST and SRC must be pointer types.
8395 ??? This is what old code did. Is the testing for pointer types
8398 If either SRC is readonly or length is 1, we can use memcpy. */
8399 if (!dest_align
|| !src_align
)
8401 if (readonly_data_expr (src
)
8402 || (host_integerp (len
, 1)
8403 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8404 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8406 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8409 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8412 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8413 if (TREE_CODE (src
) == ADDR_EXPR
8414 && TREE_CODE (dest
) == ADDR_EXPR
)
8416 tree src_base
, dest_base
, fn
;
8417 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8418 HOST_WIDE_INT size
= -1;
8419 HOST_WIDE_INT maxsize
= -1;
8421 srcvar
= TREE_OPERAND (src
, 0);
8422 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8424 destvar
= TREE_OPERAND (dest
, 0);
8425 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8427 if (host_integerp (len
, 1))
8428 maxsize
= tree_low_cst (len
, 1);
8431 src_offset
/= BITS_PER_UNIT
;
8432 dest_offset
/= BITS_PER_UNIT
;
8433 if (SSA_VAR_P (src_base
)
8434 && SSA_VAR_P (dest_base
))
8436 if (operand_equal_p (src_base
, dest_base
, 0)
8437 && ranges_overlap_p (src_offset
, maxsize
,
8438 dest_offset
, maxsize
))
8441 else if (TREE_CODE (src_base
) == MEM_REF
8442 && TREE_CODE (dest_base
) == MEM_REF
)
8445 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8446 TREE_OPERAND (dest_base
, 0), 0))
8448 off
= double_int_add (mem_ref_offset (src_base
),
8449 shwi_to_double_int (src_offset
));
8450 if (!double_int_fits_in_shwi_p (off
))
8452 src_offset
= off
.low
;
8453 off
= double_int_add (mem_ref_offset (dest_base
),
8454 shwi_to_double_int (dest_offset
));
8455 if (!double_int_fits_in_shwi_p (off
))
8457 dest_offset
= off
.low
;
8458 if (ranges_overlap_p (src_offset
, maxsize
,
8459 dest_offset
, maxsize
))
8465 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8468 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8473 if (!host_integerp (len
, 0))
8476 This logic lose for arguments like (type *)malloc (sizeof (type)),
8477 since we strip the casts of up to VOID return value from malloc.
8478 Perhaps we ought to inherit type from non-VOID argument here? */
8481 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8482 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8484 tree tem
= TREE_OPERAND (src
, 0);
8486 if (tem
!= TREE_OPERAND (src
, 0))
8487 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8489 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8491 tree tem
= TREE_OPERAND (dest
, 0);
8493 if (tem
!= TREE_OPERAND (dest
, 0))
8494 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8496 srctype
= TREE_TYPE (TREE_TYPE (src
));
8498 && TREE_CODE (srctype
) == ARRAY_TYPE
8499 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8501 srctype
= TREE_TYPE (srctype
);
8503 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8505 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8507 && TREE_CODE (desttype
) == ARRAY_TYPE
8508 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8510 desttype
= TREE_TYPE (desttype
);
8512 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8514 if (!srctype
|| !desttype
8515 || TREE_ADDRESSABLE (srctype
)
8516 || TREE_ADDRESSABLE (desttype
)
8517 || !TYPE_SIZE_UNIT (srctype
)
8518 || !TYPE_SIZE_UNIT (desttype
)
8519 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8520 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
)
8523 src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8524 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8525 if (dest_align
< TYPE_ALIGN (desttype
)
8526 || src_align
< TYPE_ALIGN (srctype
))
8530 dest
= builtin_save_expr (dest
);
8532 /* Build accesses at offset zero with a ref-all character type. */
8533 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8534 ptr_mode
, true), 0);
8537 STRIP_NOPS (destvar
);
8538 if (TREE_CODE (destvar
) == ADDR_EXPR
8539 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8540 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8541 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8543 destvar
= NULL_TREE
;
8546 STRIP_NOPS (srcvar
);
8547 if (TREE_CODE (srcvar
) == ADDR_EXPR
8548 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8549 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8550 && (!STRICT_ALIGNMENT
8552 || src_align
>= TYPE_ALIGN (desttype
)))
8553 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8558 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8561 if (srcvar
== NULL_TREE
)
8563 if (STRICT_ALIGNMENT
8564 && src_align
< TYPE_ALIGN (desttype
))
8567 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8569 else if (destvar
== NULL_TREE
)
8571 if (STRICT_ALIGNMENT
8572 && dest_align
< TYPE_ALIGN (srctype
))
8575 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8578 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
8584 if (endp
== 0 || endp
== 3)
8585 return omit_one_operand_loc (loc
, type
, dest
, expr
);
8591 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
8594 len
= fold_convert_loc (loc
, sizetype
, len
);
8595 dest
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8596 dest
= fold_convert_loc (loc
, type
, dest
);
8598 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
8602 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8603 If LEN is not NULL, it represents the length of the string to be
8604 copied. Return NULL_TREE if no simplification can be made. */
8607 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
8611 if (!validate_arg (dest
, POINTER_TYPE
)
8612 || !validate_arg (src
, POINTER_TYPE
))
8615 /* If SRC and DEST are the same (and not volatile), return DEST. */
8616 if (operand_equal_p (src
, dest
, 0))
8617 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8619 if (optimize_function_for_size_p (cfun
))
8622 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8628 len
= c_strlen (src
, 1);
8629 if (! len
|| TREE_SIDE_EFFECTS (len
))
8633 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
8634 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8635 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8638 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8639 Return NULL_TREE if no simplification can be made. */
8642 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
8644 tree fn
, len
, lenp1
, call
, type
;
8646 if (!validate_arg (dest
, POINTER_TYPE
)
8647 || !validate_arg (src
, POINTER_TYPE
))
8650 len
= c_strlen (src
, 1);
8652 || TREE_CODE (len
) != INTEGER_CST
)
8655 if (optimize_function_for_size_p (cfun
)
8656 /* If length is zero it's small enough. */
8657 && !integer_zerop (len
))
8660 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8664 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
8665 /* We use dest twice in building our expression. Save it from
8666 multiple expansions. */
8667 dest
= builtin_save_expr (dest
);
8668 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
8670 type
= TREE_TYPE (TREE_TYPE (fndecl
));
8671 len
= fold_convert_loc (loc
, sizetype
, len
);
8672 dest
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8673 dest
= fold_convert_loc (loc
, type
, dest
);
8674 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
8678 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8679 If SLEN is not NULL, it represents the length of the source string.
8680 Return NULL_TREE if no simplification can be made. */
8683 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
8684 tree src
, tree len
, tree slen
)
8688 if (!validate_arg (dest
, POINTER_TYPE
)
8689 || !validate_arg (src
, POINTER_TYPE
)
8690 || !validate_arg (len
, INTEGER_TYPE
))
8693 /* If the LEN parameter is zero, return DEST. */
8694 if (integer_zerop (len
))
8695 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8697 /* We can't compare slen with len as constants below if len is not a
8699 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8703 slen
= c_strlen (src
, 1);
8705 /* Now, we must be passed a constant src ptr parameter. */
8706 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8709 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
8711 /* We do not support simplification of this case, though we do
8712 support it when expanding trees into RTL. */
8713 /* FIXME: generate a call to __builtin_memset. */
8714 if (tree_int_cst_lt (slen
, len
))
8717 /* OK transform into builtin memcpy. */
8718 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8721 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8722 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8725 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8726 arguments to the call, and TYPE is its return type.
8727 Return NULL_TREE if no simplification can be made. */
8730 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8732 if (!validate_arg (arg1
, POINTER_TYPE
)
8733 || !validate_arg (arg2
, INTEGER_TYPE
)
8734 || !validate_arg (len
, INTEGER_TYPE
))
8740 if (TREE_CODE (arg2
) != INTEGER_CST
8741 || !host_integerp (len
, 1))
8744 p1
= c_getstr (arg1
);
8745 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8751 if (target_char_cast (arg2
, &c
))
8754 r
= (char *) memchr (p1
, c
, tree_low_cst (len
, 1));
8757 return build_int_cst (TREE_TYPE (arg1
), 0);
8759 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8761 return fold_convert_loc (loc
, type
, tem
);
8767 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8768 Return NULL_TREE if no simplification can be made. */
8771 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8773 const char *p1
, *p2
;
8775 if (!validate_arg (arg1
, POINTER_TYPE
)
8776 || !validate_arg (arg2
, POINTER_TYPE
)
8777 || !validate_arg (len
, INTEGER_TYPE
))
8780 /* If the LEN parameter is zero, return zero. */
8781 if (integer_zerop (len
))
8782 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8785 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8786 if (operand_equal_p (arg1
, arg2
, 0))
8787 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8789 p1
= c_getstr (arg1
);
8790 p2
= c_getstr (arg2
);
8792 /* If all arguments are constant, and the value of len is not greater
8793 than the lengths of arg1 and arg2, evaluate at compile-time. */
8794 if (host_integerp (len
, 1) && p1
&& p2
8795 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8796 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8798 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8801 return integer_one_node
;
8803 return integer_minus_one_node
;
8805 return integer_zero_node
;
8808 /* If len parameter is one, return an expression corresponding to
8809 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8810 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8812 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8813 tree cst_uchar_ptr_node
8814 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8817 = fold_convert_loc (loc
, integer_type_node
,
8818 build1 (INDIRECT_REF
, cst_uchar_node
,
8819 fold_convert_loc (loc
,
8823 = fold_convert_loc (loc
, integer_type_node
,
8824 build1 (INDIRECT_REF
, cst_uchar_node
,
8825 fold_convert_loc (loc
,
8828 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8834 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8835 Return NULL_TREE if no simplification can be made. */
8838 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8840 const char *p1
, *p2
;
8842 if (!validate_arg (arg1
, POINTER_TYPE
)
8843 || !validate_arg (arg2
, POINTER_TYPE
))
8846 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8847 if (operand_equal_p (arg1
, arg2
, 0))
8848 return integer_zero_node
;
8850 p1
= c_getstr (arg1
);
8851 p2
= c_getstr (arg2
);
8855 const int i
= strcmp (p1
, p2
);
8857 return integer_minus_one_node
;
8859 return integer_one_node
;
8861 return integer_zero_node
;
8864 /* If the second arg is "", return *(const unsigned char*)arg1. */
8865 if (p2
&& *p2
== '\0')
8867 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8868 tree cst_uchar_ptr_node
8869 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8871 return fold_convert_loc (loc
, integer_type_node
,
8872 build1 (INDIRECT_REF
, cst_uchar_node
,
8873 fold_convert_loc (loc
,
8878 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8879 if (p1
&& *p1
== '\0')
8881 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8882 tree cst_uchar_ptr_node
8883 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8886 = fold_convert_loc (loc
, integer_type_node
,
8887 build1 (INDIRECT_REF
, cst_uchar_node
,
8888 fold_convert_loc (loc
,
8891 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8897 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8898 Return NULL_TREE if no simplification can be made. */
8901 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8903 const char *p1
, *p2
;
8905 if (!validate_arg (arg1
, POINTER_TYPE
)
8906 || !validate_arg (arg2
, POINTER_TYPE
)
8907 || !validate_arg (len
, INTEGER_TYPE
))
8910 /* If the LEN parameter is zero, return zero. */
8911 if (integer_zerop (len
))
8912 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8915 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8916 if (operand_equal_p (arg1
, arg2
, 0))
8917 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8919 p1
= c_getstr (arg1
);
8920 p2
= c_getstr (arg2
);
8922 if (host_integerp (len
, 1) && p1
&& p2
)
8924 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
8926 return integer_one_node
;
8928 return integer_minus_one_node
;
8930 return integer_zero_node
;
8933 /* If the second arg is "", and the length is greater than zero,
8934 return *(const unsigned char*)arg1. */
8935 if (p2
&& *p2
== '\0'
8936 && TREE_CODE (len
) == INTEGER_CST
8937 && tree_int_cst_sgn (len
) == 1)
8939 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8940 tree cst_uchar_ptr_node
8941 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8943 return fold_convert_loc (loc
, integer_type_node
,
8944 build1 (INDIRECT_REF
, cst_uchar_node
,
8945 fold_convert_loc (loc
,
8950 /* If the first arg is "", and the length is greater than zero,
8951 return -*(const unsigned char*)arg2. */
8952 if (p1
&& *p1
== '\0'
8953 && TREE_CODE (len
) == INTEGER_CST
8954 && tree_int_cst_sgn (len
) == 1)
8956 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8957 tree cst_uchar_ptr_node
8958 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8960 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8961 build1 (INDIRECT_REF
, cst_uchar_node
,
8962 fold_convert_loc (loc
,
8965 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8968 /* If len parameter is one, return an expression corresponding to
8969 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8970 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8972 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8973 tree cst_uchar_ptr_node
8974 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8976 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8977 build1 (INDIRECT_REF
, cst_uchar_node
,
8978 fold_convert_loc (loc
,
8981 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8982 build1 (INDIRECT_REF
, cst_uchar_node
,
8983 fold_convert_loc (loc
,
8986 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8992 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8993 ARG. Return NULL_TREE if no simplification can be made. */
8996 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9000 if (!validate_arg (arg
, REAL_TYPE
))
9003 /* If ARG is a compile-time constant, determine the result. */
9004 if (TREE_CODE (arg
) == REAL_CST
9005 && !TREE_OVERFLOW (arg
))
9009 c
= TREE_REAL_CST (arg
);
9010 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
9011 return fold_convert_loc (loc
, type
, temp
);
9014 /* If ARG is non-negative, the result is always zero. */
9015 if (tree_expr_nonnegative_p (arg
))
9016 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9018 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9019 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9020 return fold_build2_loc (loc
, LT_EXPR
, type
, arg
,
9021 build_real (TREE_TYPE (arg
), dconst0
));
9026 /* Fold function call to builtin copysign, copysignf or copysignl with
9027 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9031 fold_builtin_copysign (location_t loc
, tree fndecl
,
9032 tree arg1
, tree arg2
, tree type
)
9036 if (!validate_arg (arg1
, REAL_TYPE
)
9037 || !validate_arg (arg2
, REAL_TYPE
))
9040 /* copysign(X,X) is X. */
9041 if (operand_equal_p (arg1
, arg2
, 0))
9042 return fold_convert_loc (loc
, type
, arg1
);
9044 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9045 if (TREE_CODE (arg1
) == REAL_CST
9046 && TREE_CODE (arg2
) == REAL_CST
9047 && !TREE_OVERFLOW (arg1
)
9048 && !TREE_OVERFLOW (arg2
))
9050 REAL_VALUE_TYPE c1
, c2
;
9052 c1
= TREE_REAL_CST (arg1
);
9053 c2
= TREE_REAL_CST (arg2
);
9054 /* c1.sign := c2.sign. */
9055 real_copysign (&c1
, &c2
);
9056 return build_real (type
, c1
);
9059 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9060 Remember to evaluate Y for side-effects. */
9061 if (tree_expr_nonnegative_p (arg2
))
9062 return omit_one_operand_loc (loc
, type
,
9063 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9066 /* Strip sign changing operations for the first argument. */
9067 tem
= fold_strip_sign_ops (arg1
);
9069 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9074 /* Fold a call to builtin isascii with argument ARG. */
9077 fold_builtin_isascii (location_t loc
, tree arg
)
9079 if (!validate_arg (arg
, INTEGER_TYPE
))
9083 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9084 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9085 build_int_cst (NULL_TREE
,
9086 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9087 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9088 arg
, integer_zero_node
);
9092 /* Fold a call to builtin toascii with argument ARG. */
9095 fold_builtin_toascii (location_t loc
, tree arg
)
9097 if (!validate_arg (arg
, INTEGER_TYPE
))
9100 /* Transform toascii(c) -> (c & 0x7f). */
9101 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9102 build_int_cst (NULL_TREE
, 0x7f));
9105 /* Fold a call to builtin isdigit with argument ARG. */
9108 fold_builtin_isdigit (location_t loc
, tree arg
)
9110 if (!validate_arg (arg
, INTEGER_TYPE
))
9114 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9115 /* According to the C standard, isdigit is unaffected by locale.
9116 However, it definitely is affected by the target character set. */
9117 unsigned HOST_WIDE_INT target_digit0
9118 = lang_hooks
.to_target_charset ('0');
9120 if (target_digit0
== 0)
9123 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9124 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9125 build_int_cst (unsigned_type_node
, target_digit0
));
9126 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9127 build_int_cst (unsigned_type_node
, 9));
9131 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9134 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9136 if (!validate_arg (arg
, REAL_TYPE
))
9139 arg
= fold_convert_loc (loc
, type
, arg
);
9140 if (TREE_CODE (arg
) == REAL_CST
)
9141 return fold_abs_const (arg
, type
);
9142 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9145 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9148 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9150 if (!validate_arg (arg
, INTEGER_TYPE
))
9153 arg
= fold_convert_loc (loc
, type
, arg
);
9154 if (TREE_CODE (arg
) == INTEGER_CST
)
9155 return fold_abs_const (arg
, type
);
9156 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9159 /* Fold a call to builtin fmin or fmax. */
9162 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9163 tree type
, bool max
)
9165 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9167 /* Calculate the result when the argument is a constant. */
9168 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9173 /* If either argument is NaN, return the other one. Avoid the
9174 transformation if we get (and honor) a signalling NaN. Using
9175 omit_one_operand() ensures we create a non-lvalue. */
9176 if (TREE_CODE (arg0
) == REAL_CST
9177 && real_isnan (&TREE_REAL_CST (arg0
))
9178 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9179 || ! TREE_REAL_CST (arg0
).signalling
))
9180 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9181 if (TREE_CODE (arg1
) == REAL_CST
9182 && real_isnan (&TREE_REAL_CST (arg1
))
9183 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9184 || ! TREE_REAL_CST (arg1
).signalling
))
9185 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9187 /* Transform fmin/fmax(x,x) -> x. */
9188 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9189 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9191 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9192 functions to return the numeric arg if the other one is NaN.
9193 These tree codes don't honor that, so only transform if
9194 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9195 handled, so we don't have to worry about it either. */
9196 if (flag_finite_math_only
)
9197 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9198 fold_convert_loc (loc
, type
, arg0
),
9199 fold_convert_loc (loc
, type
, arg1
));
9204 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9207 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9209 if (validate_arg (arg
, COMPLEX_TYPE
)
9210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9212 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9216 tree new_arg
= builtin_save_expr (arg
);
9217 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9218 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9219 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9226 /* Fold a call to builtin logb/ilogb. */
9229 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9231 if (! validate_arg (arg
, REAL_TYPE
))
9236 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9238 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9244 /* If arg is Inf or NaN and we're logb, return it. */
9245 if (TREE_CODE (rettype
) == REAL_TYPE
)
9246 return fold_convert_loc (loc
, rettype
, arg
);
9247 /* Fall through... */
9249 /* Zero may set errno and/or raise an exception for logb, also
9250 for ilogb we don't know FP_ILOGB0. */
9253 /* For normal numbers, proceed iff radix == 2. In GCC,
9254 normalized significands are in the range [0.5, 1.0). We
9255 want the exponent as if they were [1.0, 2.0) so get the
9256 exponent and subtract 1. */
9257 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9258 return fold_convert_loc (loc
, rettype
,
9259 build_int_cst (NULL_TREE
,
9260 REAL_EXP (value
)-1));
9268 /* Fold a call to builtin significand, if radix == 2. */
9271 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9273 if (! validate_arg (arg
, REAL_TYPE
))
9278 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9280 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9287 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9288 return fold_convert_loc (loc
, rettype
, arg
);
9290 /* For normal numbers, proceed iff radix == 2. */
9291 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9293 REAL_VALUE_TYPE result
= *value
;
9294 /* In GCC, normalized significands are in the range [0.5,
9295 1.0). We want them to be [1.0, 2.0) so set the
9297 SET_REAL_EXP (&result
, 1);
9298 return build_real (rettype
, result
);
9307 /* Fold a call to builtin frexp, we can assume the base is 2. */
9310 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9312 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9317 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9320 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9322 /* Proceed if a valid pointer type was passed in. */
9323 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9325 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9331 /* For +-0, return (*exp = 0, +-0). */
9332 exp
= integer_zero_node
;
9337 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9338 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9341 /* Since the frexp function always expects base 2, and in
9342 GCC normalized significands are already in the range
9343 [0.5, 1.0), we have exactly what frexp wants. */
9344 REAL_VALUE_TYPE frac_rvt
= *value
;
9345 SET_REAL_EXP (&frac_rvt
, 0);
9346 frac
= build_real (rettype
, frac_rvt
);
9347 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9354 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9355 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9356 TREE_SIDE_EFFECTS (arg1
) = 1;
9357 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9363 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9364 then we can assume the base is two. If it's false, then we have to
9365 check the mode of the TYPE parameter in certain cases. */
9368 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9369 tree type
, bool ldexp
)
9371 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9376 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9377 if (real_zerop (arg0
) || integer_zerop (arg1
)
9378 || (TREE_CODE (arg0
) == REAL_CST
9379 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9380 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9382 /* If both arguments are constant, then try to evaluate it. */
9383 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9384 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9385 && host_integerp (arg1
, 0))
9387 /* Bound the maximum adjustment to twice the range of the
9388 mode's valid exponents. Use abs to ensure the range is
9389 positive as a sanity check. */
9390 const long max_exp_adj
= 2 *
9391 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9392 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9394 /* Get the user-requested adjustment. */
9395 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9397 /* The requested adjustment must be inside this range. This
9398 is a preliminary cap to avoid things like overflow, we
9399 may still fail to compute the result for other reasons. */
9400 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9402 REAL_VALUE_TYPE initial_result
;
9404 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9406 /* Ensure we didn't overflow. */
9407 if (! real_isinf (&initial_result
))
9409 const REAL_VALUE_TYPE trunc_result
9410 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9412 /* Only proceed if the target mode can hold the
9414 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9415 return build_real (type
, trunc_result
);
9424 /* Fold a call to builtin modf. */
9427 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9429 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9434 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9437 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9439 /* Proceed if a valid pointer type was passed in. */
9440 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9442 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9443 REAL_VALUE_TYPE trunc
, frac
;
9449 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9450 trunc
= frac
= *value
;
9453 /* For +-Inf, return (*arg1 = arg0, +-0). */
9455 frac
.sign
= value
->sign
;
9459 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9460 real_trunc (&trunc
, VOIDmode
, value
);
9461 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9462 /* If the original number was negative and already
9463 integral, then the fractional part is -0.0. */
9464 if (value
->sign
&& frac
.cl
== rvc_zero
)
9465 frac
.sign
= value
->sign
;
9469 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9470 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9471 build_real (rettype
, trunc
));
9472 TREE_SIDE_EFFECTS (arg1
) = 1;
9473 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9474 build_real (rettype
, frac
));
9480 /* Given a location LOC, an interclass builtin function decl FNDECL
9481 and its single argument ARG, return an folded expression computing
9482 the same, or NULL_TREE if we either couldn't or didn't want to fold
9483 (the latter happen if there's an RTL instruction available). */
9486 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9488 enum machine_mode mode
;
9490 if (!validate_arg (arg
, REAL_TYPE
))
9493 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9496 mode
= TYPE_MODE (TREE_TYPE (arg
));
9498 /* If there is no optab, try generic code. */
9499 switch (DECL_FUNCTION_CODE (fndecl
))
9503 CASE_FLT_FN (BUILT_IN_ISINF
):
9505 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9506 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
9507 tree
const type
= TREE_TYPE (arg
);
9511 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9512 real_from_string (&r
, buf
);
9513 result
= build_call_expr (isgr_fn
, 2,
9514 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9515 build_real (type
, r
));
9518 CASE_FLT_FN (BUILT_IN_FINITE
):
9519 case BUILT_IN_ISFINITE
:
9521 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9522 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
9523 tree
const type
= TREE_TYPE (arg
);
9527 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9528 real_from_string (&r
, buf
);
9529 result
= build_call_expr (isle_fn
, 2,
9530 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9531 build_real (type
, r
));
9532 /*result = fold_build2_loc (loc, UNGT_EXPR,
9533 TREE_TYPE (TREE_TYPE (fndecl)),
9534 fold_build1_loc (loc, ABS_EXPR, type, arg),
9535 build_real (type, r));
9536 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9537 TREE_TYPE (TREE_TYPE (fndecl)),
9541 case BUILT_IN_ISNORMAL
:
9543 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9544 islessequal(fabs(x),DBL_MAX). */
9545 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
9546 tree
const isge_fn
= built_in_decls
[BUILT_IN_ISGREATEREQUAL
];
9547 tree
const type
= TREE_TYPE (arg
);
9548 REAL_VALUE_TYPE rmax
, rmin
;
9551 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9552 real_from_string (&rmax
, buf
);
9553 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9554 real_from_string (&rmin
, buf
);
9555 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9556 result
= build_call_expr (isle_fn
, 2, arg
,
9557 build_real (type
, rmax
));
9558 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9559 build_call_expr (isge_fn
, 2, arg
,
9560 build_real (type
, rmin
)));
9570 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9571 ARG is the argument for the call. */
9574 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9576 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9579 if (!validate_arg (arg
, REAL_TYPE
))
9582 switch (builtin_index
)
9584 case BUILT_IN_ISINF
:
9585 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9586 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9588 if (TREE_CODE (arg
) == REAL_CST
)
9590 r
= TREE_REAL_CST (arg
);
9591 if (real_isinf (&r
))
9592 return real_compare (GT_EXPR
, &r
, &dconst0
)
9593 ? integer_one_node
: integer_minus_one_node
;
9595 return integer_zero_node
;
9600 case BUILT_IN_ISINF_SIGN
:
9602 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9603 /* In a boolean context, GCC will fold the inner COND_EXPR to
9604 1. So e.g. "if (isinf_sign(x))" would be folded to just
9605 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9606 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9607 tree isinf_fn
= built_in_decls
[BUILT_IN_ISINF
];
9608 tree tmp
= NULL_TREE
;
9610 arg
= builtin_save_expr (arg
);
9612 if (signbit_fn
&& isinf_fn
)
9614 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9615 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9617 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9618 signbit_call
, integer_zero_node
);
9619 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9620 isinf_call
, integer_zero_node
);
9622 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9623 integer_minus_one_node
, integer_one_node
);
9624 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9632 case BUILT_IN_ISFINITE
:
9633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9634 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9635 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9637 if (TREE_CODE (arg
) == REAL_CST
)
9639 r
= TREE_REAL_CST (arg
);
9640 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9645 case BUILT_IN_ISNAN
:
9646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9647 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9649 if (TREE_CODE (arg
) == REAL_CST
)
9651 r
= TREE_REAL_CST (arg
);
9652 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9655 arg
= builtin_save_expr (arg
);
9656 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9663 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9664 This builtin will generate code to return the appropriate floating
9665 point classification depending on the value of the floating point
9666 number passed in. The possible return values must be supplied as
9667 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9668 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9669 one floating point argument which is "type generic". */
9672 fold_builtin_fpclassify (location_t loc
, tree exp
)
9674 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9675 arg
, type
, res
, tmp
;
9676 enum machine_mode mode
;
9680 /* Verify the required arguments in the original call. */
9681 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
9682 INTEGER_TYPE
, INTEGER_TYPE
,
9683 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
9686 fp_nan
= CALL_EXPR_ARG (exp
, 0);
9687 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
9688 fp_normal
= CALL_EXPR_ARG (exp
, 2);
9689 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
9690 fp_zero
= CALL_EXPR_ARG (exp
, 4);
9691 arg
= CALL_EXPR_ARG (exp
, 5);
9692 type
= TREE_TYPE (arg
);
9693 mode
= TYPE_MODE (type
);
9694 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9698 (fabs(x) == Inf ? FP_INFINITE :
9699 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9700 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9702 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9703 build_real (type
, dconst0
));
9704 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9705 tmp
, fp_zero
, fp_subnormal
);
9707 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9708 real_from_string (&r
, buf
);
9709 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9710 arg
, build_real (type
, r
));
9711 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9713 if (HONOR_INFINITIES (mode
))
9716 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9717 build_real (type
, r
));
9718 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9722 if (HONOR_NANS (mode
))
9724 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9725 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9731 /* Fold a call to an unordered comparison function such as
9732 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9733 being called and ARG0 and ARG1 are the arguments for the call.
9734 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9735 the opposite of the desired result. UNORDERED_CODE is used
9736 for modes that can hold NaNs and ORDERED_CODE is used for
9740 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9741 enum tree_code unordered_code
,
9742 enum tree_code ordered_code
)
9744 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9745 enum tree_code code
;
9747 enum tree_code code0
, code1
;
9748 tree cmp_type
= NULL_TREE
;
9750 type0
= TREE_TYPE (arg0
);
9751 type1
= TREE_TYPE (arg1
);
9753 code0
= TREE_CODE (type0
);
9754 code1
= TREE_CODE (type1
);
9756 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9757 /* Choose the wider of two real types. */
9758 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9760 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9762 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9765 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9766 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9768 if (unordered_code
== UNORDERED_EXPR
)
9770 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9771 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9772 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9775 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9777 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9778 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9781 /* Fold a call to built-in function FNDECL with 0 arguments.
9782 IGNORE is true if the result of the function call is ignored. This
9783 function returns NULL_TREE if no simplification was possible. */
9786 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9788 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9789 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9792 CASE_FLT_FN (BUILT_IN_INF
):
9793 case BUILT_IN_INFD32
:
9794 case BUILT_IN_INFD64
:
9795 case BUILT_IN_INFD128
:
9796 return fold_builtin_inf (loc
, type
, true);
9798 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9799 return fold_builtin_inf (loc
, type
, false);
9801 case BUILT_IN_CLASSIFY_TYPE
:
9802 return fold_builtin_classify_type (NULL_TREE
);
9810 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9811 IGNORE is true if the result of the function call is ignored. This
9812 function returns NULL_TREE if no simplification was possible. */
9815 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
9817 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9818 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9821 case BUILT_IN_CONSTANT_P
:
9823 tree val
= fold_builtin_constant_p (arg0
);
9825 /* Gimplification will pull the CALL_EXPR for the builtin out of
9826 an if condition. When not optimizing, we'll not CSE it back.
9827 To avoid link error types of regressions, return false now. */
9828 if (!val
&& !optimize
)
9829 val
= integer_zero_node
;
9834 case BUILT_IN_CLASSIFY_TYPE
:
9835 return fold_builtin_classify_type (arg0
);
9837 case BUILT_IN_STRLEN
:
9838 return fold_builtin_strlen (loc
, type
, arg0
);
9840 CASE_FLT_FN (BUILT_IN_FABS
):
9841 return fold_builtin_fabs (loc
, arg0
, type
);
9845 case BUILT_IN_LLABS
:
9846 case BUILT_IN_IMAXABS
:
9847 return fold_builtin_abs (loc
, arg0
, type
);
9849 CASE_FLT_FN (BUILT_IN_CONJ
):
9850 if (validate_arg (arg0
, COMPLEX_TYPE
)
9851 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9852 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9855 CASE_FLT_FN (BUILT_IN_CREAL
):
9856 if (validate_arg (arg0
, COMPLEX_TYPE
)
9857 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9858 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
9861 CASE_FLT_FN (BUILT_IN_CIMAG
):
9862 if (validate_arg (arg0
, COMPLEX_TYPE
)
9863 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9864 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9867 CASE_FLT_FN (BUILT_IN_CCOS
):
9868 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
9870 CASE_FLT_FN (BUILT_IN_CCOSH
):
9871 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
9873 CASE_FLT_FN (BUILT_IN_CPROJ
):
9874 return fold_builtin_cproj(loc
, arg0
, type
);
9876 CASE_FLT_FN (BUILT_IN_CSIN
):
9877 if (validate_arg (arg0
, COMPLEX_TYPE
)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9879 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
9882 CASE_FLT_FN (BUILT_IN_CSINH
):
9883 if (validate_arg (arg0
, COMPLEX_TYPE
)
9884 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9885 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
9888 CASE_FLT_FN (BUILT_IN_CTAN
):
9889 if (validate_arg (arg0
, COMPLEX_TYPE
)
9890 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9891 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
9894 CASE_FLT_FN (BUILT_IN_CTANH
):
9895 if (validate_arg (arg0
, COMPLEX_TYPE
)
9896 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9897 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
9900 CASE_FLT_FN (BUILT_IN_CLOG
):
9901 if (validate_arg (arg0
, COMPLEX_TYPE
)
9902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9903 return do_mpc_arg1 (arg0
, type
, mpc_log
);
9906 CASE_FLT_FN (BUILT_IN_CSQRT
):
9907 if (validate_arg (arg0
, COMPLEX_TYPE
)
9908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9909 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
9912 CASE_FLT_FN (BUILT_IN_CASIN
):
9913 if (validate_arg (arg0
, COMPLEX_TYPE
)
9914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9915 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
9918 CASE_FLT_FN (BUILT_IN_CACOS
):
9919 if (validate_arg (arg0
, COMPLEX_TYPE
)
9920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9921 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
9924 CASE_FLT_FN (BUILT_IN_CATAN
):
9925 if (validate_arg (arg0
, COMPLEX_TYPE
)
9926 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9927 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
9930 CASE_FLT_FN (BUILT_IN_CASINH
):
9931 if (validate_arg (arg0
, COMPLEX_TYPE
)
9932 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9933 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
9936 CASE_FLT_FN (BUILT_IN_CACOSH
):
9937 if (validate_arg (arg0
, COMPLEX_TYPE
)
9938 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9939 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
9942 CASE_FLT_FN (BUILT_IN_CATANH
):
9943 if (validate_arg (arg0
, COMPLEX_TYPE
)
9944 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9945 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
9948 CASE_FLT_FN (BUILT_IN_CABS
):
9949 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
9951 CASE_FLT_FN (BUILT_IN_CARG
):
9952 return fold_builtin_carg (loc
, arg0
, type
);
9954 CASE_FLT_FN (BUILT_IN_SQRT
):
9955 return fold_builtin_sqrt (loc
, arg0
, type
);
9957 CASE_FLT_FN (BUILT_IN_CBRT
):
9958 return fold_builtin_cbrt (loc
, arg0
, type
);
9960 CASE_FLT_FN (BUILT_IN_ASIN
):
9961 if (validate_arg (arg0
, REAL_TYPE
))
9962 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9963 &dconstm1
, &dconst1
, true);
9966 CASE_FLT_FN (BUILT_IN_ACOS
):
9967 if (validate_arg (arg0
, REAL_TYPE
))
9968 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9969 &dconstm1
, &dconst1
, true);
9972 CASE_FLT_FN (BUILT_IN_ATAN
):
9973 if (validate_arg (arg0
, REAL_TYPE
))
9974 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9977 CASE_FLT_FN (BUILT_IN_ASINH
):
9978 if (validate_arg (arg0
, REAL_TYPE
))
9979 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9982 CASE_FLT_FN (BUILT_IN_ACOSH
):
9983 if (validate_arg (arg0
, REAL_TYPE
))
9984 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9985 &dconst1
, NULL
, true);
9988 CASE_FLT_FN (BUILT_IN_ATANH
):
9989 if (validate_arg (arg0
, REAL_TYPE
))
9990 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9991 &dconstm1
, &dconst1
, false);
9994 CASE_FLT_FN (BUILT_IN_SIN
):
9995 if (validate_arg (arg0
, REAL_TYPE
))
9996 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9999 CASE_FLT_FN (BUILT_IN_COS
):
10000 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10002 CASE_FLT_FN (BUILT_IN_TAN
):
10003 return fold_builtin_tan (arg0
, type
);
10005 CASE_FLT_FN (BUILT_IN_CEXP
):
10006 return fold_builtin_cexp (loc
, arg0
, type
);
10008 CASE_FLT_FN (BUILT_IN_CEXPI
):
10009 if (validate_arg (arg0
, REAL_TYPE
))
10010 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10013 CASE_FLT_FN (BUILT_IN_SINH
):
10014 if (validate_arg (arg0
, REAL_TYPE
))
10015 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10018 CASE_FLT_FN (BUILT_IN_COSH
):
10019 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10021 CASE_FLT_FN (BUILT_IN_TANH
):
10022 if (validate_arg (arg0
, REAL_TYPE
))
10023 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10026 CASE_FLT_FN (BUILT_IN_ERF
):
10027 if (validate_arg (arg0
, REAL_TYPE
))
10028 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10031 CASE_FLT_FN (BUILT_IN_ERFC
):
10032 if (validate_arg (arg0
, REAL_TYPE
))
10033 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10036 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10037 if (validate_arg (arg0
, REAL_TYPE
))
10038 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10041 CASE_FLT_FN (BUILT_IN_EXP
):
10042 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10044 CASE_FLT_FN (BUILT_IN_EXP2
):
10045 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10047 CASE_FLT_FN (BUILT_IN_EXP10
):
10048 CASE_FLT_FN (BUILT_IN_POW10
):
10049 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10051 CASE_FLT_FN (BUILT_IN_EXPM1
):
10052 if (validate_arg (arg0
, REAL_TYPE
))
10053 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10056 CASE_FLT_FN (BUILT_IN_LOG
):
10057 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10059 CASE_FLT_FN (BUILT_IN_LOG2
):
10060 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10062 CASE_FLT_FN (BUILT_IN_LOG10
):
10063 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10065 CASE_FLT_FN (BUILT_IN_LOG1P
):
10066 if (validate_arg (arg0
, REAL_TYPE
))
10067 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10068 &dconstm1
, NULL
, false);
10071 CASE_FLT_FN (BUILT_IN_J0
):
10072 if (validate_arg (arg0
, REAL_TYPE
))
10073 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10077 CASE_FLT_FN (BUILT_IN_J1
):
10078 if (validate_arg (arg0
, REAL_TYPE
))
10079 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10083 CASE_FLT_FN (BUILT_IN_Y0
):
10084 if (validate_arg (arg0
, REAL_TYPE
))
10085 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10086 &dconst0
, NULL
, false);
10089 CASE_FLT_FN (BUILT_IN_Y1
):
10090 if (validate_arg (arg0
, REAL_TYPE
))
10091 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10092 &dconst0
, NULL
, false);
10095 CASE_FLT_FN (BUILT_IN_NAN
):
10096 case BUILT_IN_NAND32
:
10097 case BUILT_IN_NAND64
:
10098 case BUILT_IN_NAND128
:
10099 return fold_builtin_nan (arg0
, type
, true);
10101 CASE_FLT_FN (BUILT_IN_NANS
):
10102 return fold_builtin_nan (arg0
, type
, false);
10104 CASE_FLT_FN (BUILT_IN_FLOOR
):
10105 return fold_builtin_floor (loc
, fndecl
, arg0
);
10107 CASE_FLT_FN (BUILT_IN_CEIL
):
10108 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10110 CASE_FLT_FN (BUILT_IN_TRUNC
):
10111 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10113 CASE_FLT_FN (BUILT_IN_ROUND
):
10114 return fold_builtin_round (loc
, fndecl
, arg0
);
10116 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10117 CASE_FLT_FN (BUILT_IN_RINT
):
10118 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10120 CASE_FLT_FN (BUILT_IN_LCEIL
):
10121 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10122 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10123 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10124 CASE_FLT_FN (BUILT_IN_LROUND
):
10125 CASE_FLT_FN (BUILT_IN_LLROUND
):
10126 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10128 CASE_FLT_FN (BUILT_IN_LRINT
):
10129 CASE_FLT_FN (BUILT_IN_LLRINT
):
10130 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10132 case BUILT_IN_BSWAP32
:
10133 case BUILT_IN_BSWAP64
:
10134 return fold_builtin_bswap (fndecl
, arg0
);
10136 CASE_INT_FN (BUILT_IN_FFS
):
10137 CASE_INT_FN (BUILT_IN_CLZ
):
10138 CASE_INT_FN (BUILT_IN_CTZ
):
10139 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10140 CASE_INT_FN (BUILT_IN_PARITY
):
10141 return fold_builtin_bitop (fndecl
, arg0
);
10143 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10144 return fold_builtin_signbit (loc
, arg0
, type
);
10146 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10147 return fold_builtin_significand (loc
, arg0
, type
);
10149 CASE_FLT_FN (BUILT_IN_ILOGB
):
10150 CASE_FLT_FN (BUILT_IN_LOGB
):
10151 return fold_builtin_logb (loc
, arg0
, type
);
10153 case BUILT_IN_ISASCII
:
10154 return fold_builtin_isascii (loc
, arg0
);
10156 case BUILT_IN_TOASCII
:
10157 return fold_builtin_toascii (loc
, arg0
);
10159 case BUILT_IN_ISDIGIT
:
10160 return fold_builtin_isdigit (loc
, arg0
);
10162 CASE_FLT_FN (BUILT_IN_FINITE
):
10163 case BUILT_IN_FINITED32
:
10164 case BUILT_IN_FINITED64
:
10165 case BUILT_IN_FINITED128
:
10166 case BUILT_IN_ISFINITE
:
10168 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10171 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10174 CASE_FLT_FN (BUILT_IN_ISINF
):
10175 case BUILT_IN_ISINFD32
:
10176 case BUILT_IN_ISINFD64
:
10177 case BUILT_IN_ISINFD128
:
10179 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10182 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10185 case BUILT_IN_ISNORMAL
:
10186 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10188 case BUILT_IN_ISINF_SIGN
:
10189 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10191 CASE_FLT_FN (BUILT_IN_ISNAN
):
10192 case BUILT_IN_ISNAND32
:
10193 case BUILT_IN_ISNAND64
:
10194 case BUILT_IN_ISNAND128
:
10195 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10197 case BUILT_IN_PRINTF
:
10198 case BUILT_IN_PRINTF_UNLOCKED
:
10199 case BUILT_IN_VPRINTF
:
10200 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10202 case BUILT_IN_FREE
:
10203 if (integer_zerop (arg0
))
10204 return build_empty_stmt (loc
);
10215 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10216 IGNORE is true if the result of the function call is ignored. This
10217 function returns NULL_TREE if no simplification was possible. */
10220 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10222 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10223 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10227 CASE_FLT_FN (BUILT_IN_JN
):
10228 if (validate_arg (arg0
, INTEGER_TYPE
)
10229 && validate_arg (arg1
, REAL_TYPE
))
10230 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10233 CASE_FLT_FN (BUILT_IN_YN
):
10234 if (validate_arg (arg0
, INTEGER_TYPE
)
10235 && validate_arg (arg1
, REAL_TYPE
))
10236 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10240 CASE_FLT_FN (BUILT_IN_DREM
):
10241 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10242 if (validate_arg (arg0
, REAL_TYPE
)
10243 && validate_arg(arg1
, REAL_TYPE
))
10244 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10247 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10248 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10249 if (validate_arg (arg0
, REAL_TYPE
)
10250 && validate_arg(arg1
, POINTER_TYPE
))
10251 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10254 CASE_FLT_FN (BUILT_IN_ATAN2
):
10255 if (validate_arg (arg0
, REAL_TYPE
)
10256 && validate_arg(arg1
, REAL_TYPE
))
10257 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10260 CASE_FLT_FN (BUILT_IN_FDIM
):
10261 if (validate_arg (arg0
, REAL_TYPE
)
10262 && validate_arg(arg1
, REAL_TYPE
))
10263 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10266 CASE_FLT_FN (BUILT_IN_HYPOT
):
10267 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10269 CASE_FLT_FN (BUILT_IN_CPOW
):
10270 if (validate_arg (arg0
, COMPLEX_TYPE
)
10271 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10272 && validate_arg (arg1
, COMPLEX_TYPE
)
10273 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10274 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10277 CASE_FLT_FN (BUILT_IN_LDEXP
):
10278 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10279 CASE_FLT_FN (BUILT_IN_SCALBN
):
10280 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10281 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10282 type
, /*ldexp=*/false);
10284 CASE_FLT_FN (BUILT_IN_FREXP
):
10285 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10287 CASE_FLT_FN (BUILT_IN_MODF
):
10288 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10290 case BUILT_IN_BZERO
:
10291 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10293 case BUILT_IN_FPUTS
:
10294 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10296 case BUILT_IN_FPUTS_UNLOCKED
:
10297 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10299 case BUILT_IN_STRSTR
:
10300 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10302 case BUILT_IN_STRCAT
:
10303 return fold_builtin_strcat (loc
, arg0
, arg1
);
10305 case BUILT_IN_STRSPN
:
10306 return fold_builtin_strspn (loc
, arg0
, arg1
);
10308 case BUILT_IN_STRCSPN
:
10309 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10311 case BUILT_IN_STRCHR
:
10312 case BUILT_IN_INDEX
:
10313 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10315 case BUILT_IN_STRRCHR
:
10316 case BUILT_IN_RINDEX
:
10317 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10319 case BUILT_IN_STRCPY
:
10320 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10322 case BUILT_IN_STPCPY
:
10325 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
10329 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10332 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10335 case BUILT_IN_STRCMP
:
10336 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10338 case BUILT_IN_STRPBRK
:
10339 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10341 case BUILT_IN_EXPECT
:
10342 return fold_builtin_expect (loc
, arg0
, arg1
);
10344 CASE_FLT_FN (BUILT_IN_POW
):
10345 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10347 CASE_FLT_FN (BUILT_IN_POWI
):
10348 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10350 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10351 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10353 CASE_FLT_FN (BUILT_IN_FMIN
):
10354 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10356 CASE_FLT_FN (BUILT_IN_FMAX
):
10357 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10359 case BUILT_IN_ISGREATER
:
10360 return fold_builtin_unordered_cmp (loc
, fndecl
,
10361 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10362 case BUILT_IN_ISGREATEREQUAL
:
10363 return fold_builtin_unordered_cmp (loc
, fndecl
,
10364 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10365 case BUILT_IN_ISLESS
:
10366 return fold_builtin_unordered_cmp (loc
, fndecl
,
10367 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10368 case BUILT_IN_ISLESSEQUAL
:
10369 return fold_builtin_unordered_cmp (loc
, fndecl
,
10370 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10371 case BUILT_IN_ISLESSGREATER
:
10372 return fold_builtin_unordered_cmp (loc
, fndecl
,
10373 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10374 case BUILT_IN_ISUNORDERED
:
10375 return fold_builtin_unordered_cmp (loc
, fndecl
,
10376 arg0
, arg1
, UNORDERED_EXPR
,
10379 /* We do the folding for va_start in the expander. */
10380 case BUILT_IN_VA_START
:
10383 case BUILT_IN_SPRINTF
:
10384 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10386 case BUILT_IN_OBJECT_SIZE
:
10387 return fold_builtin_object_size (arg0
, arg1
);
10389 case BUILT_IN_PRINTF
:
10390 case BUILT_IN_PRINTF_UNLOCKED
:
10391 case BUILT_IN_VPRINTF
:
10392 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10394 case BUILT_IN_PRINTF_CHK
:
10395 case BUILT_IN_VPRINTF_CHK
:
10396 if (!validate_arg (arg0
, INTEGER_TYPE
)
10397 || TREE_SIDE_EFFECTS (arg0
))
10400 return fold_builtin_printf (loc
, fndecl
,
10401 arg1
, NULL_TREE
, ignore
, fcode
);
10404 case BUILT_IN_FPRINTF
:
10405 case BUILT_IN_FPRINTF_UNLOCKED
:
10406 case BUILT_IN_VFPRINTF
:
10407 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10416 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10417 and ARG2. IGNORE is true if the result of the function call is ignored.
10418 This function returns NULL_TREE if no simplification was possible. */
10421 fold_builtin_3 (location_t loc
, tree fndecl
,
10422 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10424 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10425 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10429 CASE_FLT_FN (BUILT_IN_SINCOS
):
10430 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10432 CASE_FLT_FN (BUILT_IN_FMA
):
10433 if (validate_arg (arg0
, REAL_TYPE
)
10434 && validate_arg(arg1
, REAL_TYPE
)
10435 && validate_arg(arg2
, REAL_TYPE
))
10436 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10439 CASE_FLT_FN (BUILT_IN_REMQUO
):
10440 if (validate_arg (arg0
, REAL_TYPE
)
10441 && validate_arg(arg1
, REAL_TYPE
)
10442 && validate_arg(arg2
, POINTER_TYPE
))
10443 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10446 case BUILT_IN_MEMSET
:
10447 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10449 case BUILT_IN_BCOPY
:
10450 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10451 void_type_node
, true, /*endp=*/3);
10453 case BUILT_IN_MEMCPY
:
10454 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10455 type
, ignore
, /*endp=*/0);
10457 case BUILT_IN_MEMPCPY
:
10458 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10459 type
, ignore
, /*endp=*/1);
10461 case BUILT_IN_MEMMOVE
:
10462 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10463 type
, ignore
, /*endp=*/3);
10465 case BUILT_IN_STRNCAT
:
10466 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10468 case BUILT_IN_STRNCPY
:
10469 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10471 case BUILT_IN_STRNCMP
:
10472 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10474 case BUILT_IN_MEMCHR
:
10475 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10477 case BUILT_IN_BCMP
:
10478 case BUILT_IN_MEMCMP
:
10479 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10481 case BUILT_IN_SPRINTF
:
10482 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10484 case BUILT_IN_STRCPY_CHK
:
10485 case BUILT_IN_STPCPY_CHK
:
10486 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10489 case BUILT_IN_STRCAT_CHK
:
10490 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10492 case BUILT_IN_PRINTF_CHK
:
10493 case BUILT_IN_VPRINTF_CHK
:
10494 if (!validate_arg (arg0
, INTEGER_TYPE
)
10495 || TREE_SIDE_EFFECTS (arg0
))
10498 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10501 case BUILT_IN_FPRINTF
:
10502 case BUILT_IN_FPRINTF_UNLOCKED
:
10503 case BUILT_IN_VFPRINTF
:
10504 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10507 case BUILT_IN_FPRINTF_CHK
:
10508 case BUILT_IN_VFPRINTF_CHK
:
10509 if (!validate_arg (arg1
, INTEGER_TYPE
)
10510 || TREE_SIDE_EFFECTS (arg1
))
10513 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10522 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10523 ARG2, and ARG3. IGNORE is true if the result of the function call is
10524 ignored. This function returns NULL_TREE if no simplification was
10528 fold_builtin_4 (location_t loc
, tree fndecl
,
10529 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
10531 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10535 case BUILT_IN_MEMCPY_CHK
:
10536 case BUILT_IN_MEMPCPY_CHK
:
10537 case BUILT_IN_MEMMOVE_CHK
:
10538 case BUILT_IN_MEMSET_CHK
:
10539 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
10541 DECL_FUNCTION_CODE (fndecl
));
10543 case BUILT_IN_STRNCPY_CHK
:
10544 return fold_builtin_strncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10546 case BUILT_IN_STRNCAT_CHK
:
10547 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
10549 case BUILT_IN_FPRINTF_CHK
:
10550 case BUILT_IN_VFPRINTF_CHK
:
10551 if (!validate_arg (arg1
, INTEGER_TYPE
)
10552 || TREE_SIDE_EFFECTS (arg1
))
10555 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
10565 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10566 arguments, where NARGS <= 4. IGNORE is true if the result of the
10567 function call is ignored. This function returns NULL_TREE if no
10568 simplification was possible. Note that this only folds builtins with
10569 fixed argument patterns. Foldings that do varargs-to-varargs
10570 transformations, or that match calls with more than 4 arguments,
10571 need to be handled with fold_builtin_varargs instead. */
10573 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10576 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10578 tree ret
= NULL_TREE
;
10583 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
10586 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
10589 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
10592 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
10595 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
10603 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10604 SET_EXPR_LOCATION (ret
, loc
);
10605 TREE_NO_WARNING (ret
) = 1;
10611 /* Builtins with folding operations that operate on "..." arguments
10612 need special handling; we need to store the arguments in a convenient
10613 data structure before attempting any folding. Fortunately there are
10614 only a few builtins that fall into this category. FNDECL is the
10615 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10616 result of the function call is ignored. */
10619 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
10620 bool ignore ATTRIBUTE_UNUSED
)
10622 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10623 tree ret
= NULL_TREE
;
10627 case BUILT_IN_SPRINTF_CHK
:
10628 case BUILT_IN_VSPRINTF_CHK
:
10629 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
10632 case BUILT_IN_SNPRINTF_CHK
:
10633 case BUILT_IN_VSNPRINTF_CHK
:
10634 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
10637 case BUILT_IN_FPCLASSIFY
:
10638 ret
= fold_builtin_fpclassify (loc
, exp
);
10646 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10647 SET_EXPR_LOCATION (ret
, loc
);
10648 TREE_NO_WARNING (ret
) = 1;
10654 /* Return true if FNDECL shouldn't be folded right now.
10655 If a built-in function has an inline attribute always_inline
10656 wrapper, defer folding it after always_inline functions have
10657 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10658 might not be performed. */
10661 avoid_folding_inline_builtin (tree fndecl
)
10663 return (DECL_DECLARED_INLINE_P (fndecl
)
10664 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10666 && !cfun
->always_inline_functions_inlined
10667 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
10670 /* A wrapper function for builtin folding that prevents warnings for
10671 "statement without effect" and the like, caused by removing the
10672 call node earlier than the warning is generated. */
10675 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10677 tree ret
= NULL_TREE
;
10678 tree fndecl
= get_callee_fndecl (exp
);
10680 && TREE_CODE (fndecl
) == FUNCTION_DECL
10681 && DECL_BUILT_IN (fndecl
)
10682 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10683 yet. Defer folding until we see all the arguments
10684 (after inlining). */
10685 && !CALL_EXPR_VA_ARG_PACK (exp
))
10687 int nargs
= call_expr_nargs (exp
);
10689 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10690 instead last argument is __builtin_va_arg_pack (). Defer folding
10691 even in that case, until arguments are finalized. */
10692 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10694 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10696 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10697 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10698 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10702 if (avoid_folding_inline_builtin (fndecl
))
10705 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10706 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10707 CALL_EXPR_ARGP (exp
), ignore
);
10710 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10712 tree
*args
= CALL_EXPR_ARGP (exp
);
10713 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10716 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
10724 /* Conveniently construct a function call expression. FNDECL names the
10725 function to be called and N arguments are passed in the array
10729 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10731 tree fntype
= TREE_TYPE (fndecl
);
10732 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10734 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10737 /* Conveniently construct a function call expression. FNDECL names the
10738 function to be called and the arguments are passed in the vector
10742 build_call_expr_loc_vec (location_t loc
, tree fndecl
, VEC(tree
,gc
) *vec
)
10744 return build_call_expr_loc_array (loc
, fndecl
, VEC_length (tree
, vec
),
10745 VEC_address (tree
, vec
));
10749 /* Conveniently construct a function call expression. FNDECL names the
10750 function to be called, N is the number of arguments, and the "..."
10751 parameters are the argument expressions. */
10754 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10757 tree
*argarray
= XALLOCAVEC (tree
, n
);
10761 for (i
= 0; i
< n
; i
++)
10762 argarray
[i
] = va_arg (ap
, tree
);
10764 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10767 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10768 varargs macros aren't supported by all bootstrap compilers. */
10771 build_call_expr (tree fndecl
, int n
, ...)
10774 tree
*argarray
= XALLOCAVEC (tree
, n
);
10778 for (i
= 0; i
< n
; i
++)
10779 argarray
[i
] = va_arg (ap
, tree
);
10781 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10784 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10785 N arguments are passed in the array ARGARRAY. */
10788 fold_builtin_call_array (location_t loc
, tree type
,
10793 tree ret
= NULL_TREE
;
10796 if (TREE_CODE (fn
) == ADDR_EXPR
)
10798 tree fndecl
= TREE_OPERAND (fn
, 0);
10799 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10800 && DECL_BUILT_IN (fndecl
))
10802 /* If last argument is __builtin_va_arg_pack (), arguments to this
10803 function are not finalized yet. Defer folding until they are. */
10804 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10806 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10808 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10809 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10810 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10811 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10813 if (avoid_folding_inline_builtin (fndecl
))
10814 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10815 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10817 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10821 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10823 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10825 /* First try the transformations that don't require consing up
10827 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
10832 /* If we got this far, we need to build an exp. */
10833 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10834 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
10835 return ret
? ret
: exp
;
10839 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
10842 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10843 along with N new arguments specified as the "..." parameters. SKIP
10844 is the number of arguments in EXP to be omitted. This function is used
10845 to do varargs-to-varargs transformations. */
10848 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10850 int oldnargs
= call_expr_nargs (exp
);
10851 int nargs
= oldnargs
- skip
+ n
;
10852 tree fntype
= TREE_TYPE (fndecl
);
10853 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10861 buffer
= XALLOCAVEC (tree
, nargs
);
10863 for (i
= 0; i
< n
; i
++)
10864 buffer
[i
] = va_arg (ap
, tree
);
10866 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10867 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10870 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10872 return fold (build_call_array_loc (loc
, TREE_TYPE (exp
), fn
, nargs
, buffer
));
10875 /* Validate a single argument ARG against a tree code CODE representing
10879 validate_arg (const_tree arg
, enum tree_code code
)
10883 else if (code
== POINTER_TYPE
)
10884 return POINTER_TYPE_P (TREE_TYPE (arg
));
10885 else if (code
== INTEGER_TYPE
)
10886 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10887 return code
== TREE_CODE (TREE_TYPE (arg
));
10890 /* This function validates the types of a function call argument list
10891 against a specified list of tree_codes. If the last specifier is a 0,
10892 that represents an ellipses, otherwise the last specifier must be a
10895 This is the GIMPLE version of validate_arglist. Eventually we want to
10896 completely convert builtins.c to work from GIMPLEs and the tree based
10897 validate_arglist will then be removed. */
10900 validate_gimple_arglist (const_gimple call
, ...)
10902 enum tree_code code
;
10908 va_start (ap
, call
);
10913 code
= (enum tree_code
) va_arg (ap
, int);
10917 /* This signifies an ellipses, any further arguments are all ok. */
10921 /* This signifies an endlink, if no arguments remain, return
10922 true, otherwise return false. */
10923 res
= (i
== gimple_call_num_args (call
));
10926 /* If no parameters remain or the parameter's code does not
10927 match the specified code, return false. Otherwise continue
10928 checking any remaining arguments. */
10929 arg
= gimple_call_arg (call
, i
++);
10930 if (!validate_arg (arg
, code
))
10937 /* We need gotos here since we can only have one VA_CLOSE in a
10945 /* This function validates the types of a function call argument list
10946 against a specified list of tree_codes. If the last specifier is a 0,
10947 that represents an ellipses, otherwise the last specifier must be a
10951 validate_arglist (const_tree callexpr
, ...)
10953 enum tree_code code
;
10956 const_call_expr_arg_iterator iter
;
10959 va_start (ap
, callexpr
);
10960 init_const_call_expr_arg_iterator (callexpr
, &iter
);
10964 code
= (enum tree_code
) va_arg (ap
, int);
10968 /* This signifies an ellipses, any further arguments are all ok. */
10972 /* This signifies an endlink, if no arguments remain, return
10973 true, otherwise return false. */
10974 res
= !more_const_call_expr_args_p (&iter
);
10977 /* If no parameters remain or the parameter's code does not
10978 match the specified code, return false. Otherwise continue
10979 checking any remaining arguments. */
10980 arg
= next_const_call_expr_arg (&iter
);
10981 if (!validate_arg (arg
, code
))
10988 /* We need gotos here since we can only have one VA_CLOSE in a
10996 /* Default target-specific builtin expander that does nothing. */
10999 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11000 rtx target ATTRIBUTE_UNUSED
,
11001 rtx subtarget ATTRIBUTE_UNUSED
,
11002 enum machine_mode mode ATTRIBUTE_UNUSED
,
11003 int ignore ATTRIBUTE_UNUSED
)
11008 /* Returns true is EXP represents data that would potentially reside
11009 in a readonly section. */
11012 readonly_data_expr (tree exp
)
11016 if (TREE_CODE (exp
) != ADDR_EXPR
)
11019 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11023 /* Make sure we call decl_readonly_section only for trees it
11024 can handle (since it returns true for everything it doesn't
11026 if (TREE_CODE (exp
) == STRING_CST
11027 || TREE_CODE (exp
) == CONSTRUCTOR
11028 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11029 return decl_readonly_section (exp
, 0);
11034 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11035 to the call, and TYPE is its return type.
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11053 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11055 if (!validate_arg (s1
, POINTER_TYPE
)
11056 || !validate_arg (s2
, POINTER_TYPE
))
11061 const char *p1
, *p2
;
11063 p2
= c_getstr (s2
);
11067 p1
= c_getstr (s1
);
11070 const char *r
= strstr (p1
, p2
);
11074 return build_int_cst (TREE_TYPE (s1
), 0);
11076 /* Return an offset into the constant string argument. */
11077 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11078 s1
, size_int (r
- p1
));
11079 return fold_convert_loc (loc
, type
, tem
);
11082 /* The argument is const char *, and the result is char *, so we need
11083 a type conversion here to avoid a warning. */
11085 return fold_convert_loc (loc
, type
, s1
);
11090 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11094 /* New argument list transforming strstr(s1, s2) to
11095 strchr(s1, s2[0]). */
11096 return build_call_expr_loc (loc
, fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11100 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11101 the call, and TYPE is its return type.
11103 Return NULL_TREE if no simplification was possible, otherwise return the
11104 simplified form of the call as a tree.
11106 The simplified form may be a constant or other expression which
11107 computes the same value, but in a more efficient manner (including
11108 calls to other builtin functions).
11110 The call may contain arguments which need to be evaluated, but
11111 which are not useful to determine the result of the call. In
11112 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11113 COMPOUND_EXPR will be an argument which must be evaluated.
11114 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11115 COMPOUND_EXPR in the chain will contain the tree for the simplified
11116 form of the builtin function call. */
11119 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11121 if (!validate_arg (s1
, POINTER_TYPE
)
11122 || !validate_arg (s2
, INTEGER_TYPE
))
11128 if (TREE_CODE (s2
) != INTEGER_CST
)
11131 p1
= c_getstr (s1
);
11138 if (target_char_cast (s2
, &c
))
11141 r
= strchr (p1
, c
);
11144 return build_int_cst (TREE_TYPE (s1
), 0);
11146 /* Return an offset into the constant string argument. */
11147 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11148 s1
, size_int (r
- p1
));
11149 return fold_convert_loc (loc
, type
, tem
);
11155 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11156 the call, and TYPE is its return type.
11158 Return NULL_TREE if no simplification was possible, otherwise return the
11159 simplified form of the call as a tree.
11161 The simplified form may be a constant or other expression which
11162 computes the same value, but in a more efficient manner (including
11163 calls to other builtin functions).
11165 The call may contain arguments which need to be evaluated, but
11166 which are not useful to determine the result of the call. In
11167 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11168 COMPOUND_EXPR will be an argument which must be evaluated.
11169 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11170 COMPOUND_EXPR in the chain will contain the tree for the simplified
11171 form of the builtin function call. */
11174 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11176 if (!validate_arg (s1
, POINTER_TYPE
)
11177 || !validate_arg (s2
, INTEGER_TYPE
))
11184 if (TREE_CODE (s2
) != INTEGER_CST
)
11187 p1
= c_getstr (s1
);
11194 if (target_char_cast (s2
, &c
))
11197 r
= strrchr (p1
, c
);
11200 return build_int_cst (TREE_TYPE (s1
), 0);
11202 /* Return an offset into the constant string argument. */
11203 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11204 s1
, size_int (r
- p1
));
11205 return fold_convert_loc (loc
, type
, tem
);
11208 if (! integer_zerop (s2
))
11211 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11215 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11216 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11220 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11221 to the call, and TYPE is its return type.
11223 Return NULL_TREE if no simplification was possible, otherwise return the
11224 simplified form of the call as a tree.
11226 The simplified form may be a constant or other expression which
11227 computes the same value, but in a more efficient manner (including
11228 calls to other builtin functions).
11230 The call may contain arguments which need to be evaluated, but
11231 which are not useful to determine the result of the call. In
11232 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11233 COMPOUND_EXPR will be an argument which must be evaluated.
11234 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11235 COMPOUND_EXPR in the chain will contain the tree for the simplified
11236 form of the builtin function call. */
11239 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11241 if (!validate_arg (s1
, POINTER_TYPE
)
11242 || !validate_arg (s2
, POINTER_TYPE
))
11247 const char *p1
, *p2
;
11249 p2
= c_getstr (s2
);
11253 p1
= c_getstr (s1
);
11256 const char *r
= strpbrk (p1
, p2
);
11260 return build_int_cst (TREE_TYPE (s1
), 0);
11262 /* Return an offset into the constant string argument. */
11263 tem
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11264 s1
, size_int (r
- p1
));
11265 return fold_convert_loc (loc
, type
, tem
);
11269 /* strpbrk(x, "") == NULL.
11270 Evaluate and ignore s1 in case it had side-effects. */
11271 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11274 return NULL_TREE
; /* Really call strpbrk. */
11276 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11280 /* New argument list transforming strpbrk(s1, s2) to
11281 strchr(s1, s2[0]). */
11282 return build_call_expr_loc (loc
, fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11286 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11289 Return NULL_TREE if no simplification was possible, otherwise return the
11290 simplified form of the call as a tree.
11292 The simplified form may be a constant or other expression which
11293 computes the same value, but in a more efficient manner (including
11294 calls to other builtin functions).
11296 The call may contain arguments which need to be evaluated, but
11297 which are not useful to determine the result of the call. In
11298 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11299 COMPOUND_EXPR will be an argument which must be evaluated.
11300 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11301 COMPOUND_EXPR in the chain will contain the tree for the simplified
11302 form of the builtin function call. */
11305 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11307 if (!validate_arg (dst
, POINTER_TYPE
)
11308 || !validate_arg (src
, POINTER_TYPE
))
11312 const char *p
= c_getstr (src
);
11314 /* If the string length is zero, return the dst parameter. */
11315 if (p
&& *p
== '\0')
11318 if (optimize_insn_for_speed_p ())
11320 /* See if we can store by pieces into (dst + strlen(dst)). */
11322 tree strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11323 tree strcpy_fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11325 if (!strlen_fn
|| !strcpy_fn
)
11328 /* If we don't have a movstr we don't want to emit an strcpy
11329 call. We have to do that if the length of the source string
11330 isn't computable (in that case we can use memcpy probably
11331 later expanding to a sequence of mov instructions). If we
11332 have movstr instructions we can emit strcpy calls. */
11335 tree len
= c_strlen (src
, 1);
11336 if (! len
|| TREE_SIDE_EFFECTS (len
))
11340 /* Stabilize the argument list. */
11341 dst
= builtin_save_expr (dst
);
11343 /* Create strlen (dst). */
11344 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11345 /* Create (dst p+ strlen (dst)). */
11347 newdst
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
,
11348 TREE_TYPE (dst
), dst
, newdst
);
11349 newdst
= builtin_save_expr (newdst
);
11351 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11352 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11358 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11359 arguments to the call.
11361 Return NULL_TREE if no simplification was possible, otherwise return the
11362 simplified form of the call as a tree.
11364 The simplified form may be a constant or other expression which
11365 computes the same value, but in a more efficient manner (including
11366 calls to other builtin functions).
11368 The call may contain arguments which need to be evaluated, but
11369 which are not useful to determine the result of the call. In
11370 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11371 COMPOUND_EXPR will be an argument which must be evaluated.
11372 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11373 COMPOUND_EXPR in the chain will contain the tree for the simplified
11374 form of the builtin function call. */
11377 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11379 if (!validate_arg (dst
, POINTER_TYPE
)
11380 || !validate_arg (src
, POINTER_TYPE
)
11381 || !validate_arg (len
, INTEGER_TYPE
))
11385 const char *p
= c_getstr (src
);
11387 /* If the requested length is zero, or the src parameter string
11388 length is zero, return the dst parameter. */
11389 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11390 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11392 /* If the requested len is greater than or equal to the string
11393 length, call strcat. */
11394 if (TREE_CODE (len
) == INTEGER_CST
&& p
11395 && compare_tree_int (len
, strlen (p
)) >= 0)
11397 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11399 /* If the replacement _DECL isn't initialized, don't do the
11404 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11410 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11413 Return NULL_TREE if no simplification was possible, otherwise return the
11414 simplified form of the call as a tree.
11416 The simplified form may be a constant or other expression which
11417 computes the same value, but in a more efficient manner (including
11418 calls to other builtin functions).
11420 The call may contain arguments which need to be evaluated, but
11421 which are not useful to determine the result of the call. In
11422 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11423 COMPOUND_EXPR will be an argument which must be evaluated.
11424 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11425 COMPOUND_EXPR in the chain will contain the tree for the simplified
11426 form of the builtin function call. */
11429 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11431 if (!validate_arg (s1
, POINTER_TYPE
)
11432 || !validate_arg (s2
, POINTER_TYPE
))
11436 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11438 /* If both arguments are constants, evaluate at compile-time. */
11441 const size_t r
= strspn (p1
, p2
);
11442 return size_int (r
);
11445 /* If either argument is "", return NULL_TREE. */
11446 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11447 /* Evaluate and ignore both arguments in case either one has
11449 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11455 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11458 Return NULL_TREE if no simplification was possible, otherwise return the
11459 simplified form of the call as a tree.
11461 The simplified form may be a constant or other expression which
11462 computes the same value, but in a more efficient manner (including
11463 calls to other builtin functions).
11465 The call may contain arguments which need to be evaluated, but
11466 which are not useful to determine the result of the call. In
11467 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11468 COMPOUND_EXPR will be an argument which must be evaluated.
11469 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11470 COMPOUND_EXPR in the chain will contain the tree for the simplified
11471 form of the builtin function call. */
11474 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11476 if (!validate_arg (s1
, POINTER_TYPE
)
11477 || !validate_arg (s2
, POINTER_TYPE
))
11481 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11483 /* If both arguments are constants, evaluate at compile-time. */
11486 const size_t r
= strcspn (p1
, p2
);
11487 return size_int (r
);
11490 /* If the first argument is "", return NULL_TREE. */
11491 if (p1
&& *p1
== '\0')
11493 /* Evaluate and ignore argument s2 in case it has
11495 return omit_one_operand_loc (loc
, size_type_node
,
11496 size_zero_node
, s2
);
11499 /* If the second argument is "", return __builtin_strlen(s1). */
11500 if (p2
&& *p2
== '\0')
11502 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11504 /* If the replacement _DECL isn't initialized, don't do the
11509 return build_call_expr_loc (loc
, fn
, 1, s1
);
11515 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11516 to the call. IGNORE is true if the value returned
11517 by the builtin will be ignored. UNLOCKED is true is true if this
11518 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11519 the known length of the string. Return NULL_TREE if no simplification
11523 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11524 bool ignore
, bool unlocked
, tree len
)
11526 /* If we're using an unlocked function, assume the other unlocked
11527 functions exist explicitly. */
11528 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11529 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11530 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11531 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11533 /* If the return value is used, don't do the transformation. */
11537 /* Verify the arguments in the original call. */
11538 if (!validate_arg (arg0
, POINTER_TYPE
)
11539 || !validate_arg (arg1
, POINTER_TYPE
))
11543 len
= c_strlen (arg0
, 0);
11545 /* Get the length of the string passed to fputs. If the length
11546 can't be determined, punt. */
11548 || TREE_CODE (len
) != INTEGER_CST
)
11551 switch (compare_tree_int (len
, 1))
11553 case -1: /* length is 0, delete the call entirely . */
11554 return omit_one_operand_loc (loc
, integer_type_node
,
11555 integer_zero_node
, arg1
);;
11557 case 0: /* length is 1, call fputc. */
11559 const char *p
= c_getstr (arg0
);
11564 return build_call_expr_loc (loc
, fn_fputc
, 2,
11565 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11571 case 1: /* length is greater than 1, call fwrite. */
11573 /* If optimizing for size keep fputs. */
11574 if (optimize_function_for_size_p (cfun
))
11576 /* New argument list transforming fputs(string, stream) to
11577 fwrite(string, 1, len, stream). */
11579 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
11580 size_one_node
, len
, arg1
);
11585 gcc_unreachable ();
11590 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11591 produced. False otherwise. This is done so that we don't output the error
11592 or warning twice or three times. */
11595 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11597 tree fntype
= TREE_TYPE (current_function_decl
);
11598 int nargs
= call_expr_nargs (exp
);
11601 if (!stdarg_p (fntype
))
11603 error ("%<va_start%> used in function with fixed args");
11609 if (va_start_p
&& (nargs
!= 2))
11611 error ("wrong number of arguments to function %<va_start%>");
11614 arg
= CALL_EXPR_ARG (exp
, 1);
11616 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11617 when we checked the arguments and if needed issued a warning. */
11622 /* Evidently an out of date version of <stdarg.h>; can't validate
11623 va_start's second argument, but can still work as intended. */
11624 warning (0, "%<__builtin_next_arg%> called without an argument");
11627 else if (nargs
> 1)
11629 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11632 arg
= CALL_EXPR_ARG (exp
, 0);
11635 if (TREE_CODE (arg
) == SSA_NAME
)
11636 arg
= SSA_NAME_VAR (arg
);
11638 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11639 or __builtin_next_arg (0) the first time we see it, after checking
11640 the arguments and if needed issuing a warning. */
11641 if (!integer_zerop (arg
))
11643 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11645 /* Strip off all nops for the sake of the comparison. This
11646 is not quite the same as STRIP_NOPS. It does more.
11647 We must also strip off INDIRECT_EXPR for C++ reference
11649 while (CONVERT_EXPR_P (arg
)
11650 || TREE_CODE (arg
) == INDIRECT_REF
)
11651 arg
= TREE_OPERAND (arg
, 0);
11652 if (arg
!= last_parm
)
11654 /* FIXME: Sometimes with the tree optimizers we can get the
11655 not the last argument even though the user used the last
11656 argument. We just warn and set the arg to be the last
11657 argument so that we will get wrong-code because of
11659 warning (0, "second parameter of %<va_start%> not last named argument");
11662 /* Undefined by C99 7.15.1.4p4 (va_start):
11663 "If the parameter parmN is declared with the register storage
11664 class, with a function or array type, or with a type that is
11665 not compatible with the type that results after application of
11666 the default argument promotions, the behavior is undefined."
11668 else if (DECL_REGISTER (arg
))
11669 warning (0, "undefined behaviour when second parameter of "
11670 "%<va_start%> is declared with %<register%> storage");
11672 /* We want to verify the second parameter just once before the tree
11673 optimizers are run and then avoid keeping it in the tree,
11674 as otherwise we could warn even for correct code like:
11675 void foo (int i, ...)
11676 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11678 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11680 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11686 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11687 ORIG may be null if this is a 2-argument call. We don't attempt to
11688 simplify calls with more than 3 arguments.
11690 Return NULL_TREE if no simplification was possible, otherwise return the
11691 simplified form of the call as a tree. If IGNORED is true, it means that
11692 the caller does not use the returned value of the function. */
11695 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
11696 tree orig
, int ignored
)
11699 const char *fmt_str
= NULL
;
11701 /* Verify the required arguments in the original call. We deal with two
11702 types of sprintf() calls: 'sprintf (str, fmt)' and
11703 'sprintf (dest, "%s", orig)'. */
11704 if (!validate_arg (dest
, POINTER_TYPE
)
11705 || !validate_arg (fmt
, POINTER_TYPE
))
11707 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11710 /* Check whether the format is a literal string constant. */
11711 fmt_str
= c_getstr (fmt
);
11712 if (fmt_str
== NULL
)
11716 retval
= NULL_TREE
;
11718 if (!init_target_chars ())
11721 /* If the format doesn't contain % args or %%, use strcpy. */
11722 if (strchr (fmt_str
, target_percent
) == NULL
)
11724 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11729 /* Don't optimize sprintf (buf, "abc", ptr++). */
11733 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11734 'format' is known to contain no % formats. */
11735 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
11737 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11740 /* If the format is "%s", use strcpy if the result isn't used. */
11741 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11744 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11749 /* Don't crash on sprintf (str1, "%s"). */
11753 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11756 retval
= c_strlen (orig
, 1);
11757 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11760 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
11763 if (call
&& retval
)
11765 retval
= fold_convert_loc
11766 (loc
, TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11768 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11774 /* Expand a call EXP to __builtin_object_size. */
11777 expand_builtin_object_size (tree exp
)
11780 int object_size_type
;
11781 tree fndecl
= get_callee_fndecl (exp
);
11783 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11785 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11787 expand_builtin_trap ();
11791 ost
= CALL_EXPR_ARG (exp
, 1);
11794 if (TREE_CODE (ost
) != INTEGER_CST
11795 || tree_int_cst_sgn (ost
) < 0
11796 || compare_tree_int (ost
, 3) > 0)
11798 error ("%Klast argument of %D is not integer constant between 0 and 3",
11800 expand_builtin_trap ();
11804 object_size_type
= tree_low_cst (ost
, 0);
11806 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11809 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11810 FCODE is the BUILT_IN_* to use.
11811 Return NULL_RTX if we failed; the caller should emit a normal call,
11812 otherwise try to get the result in TARGET, if convenient (and in
11813 mode MODE if that's convenient). */
11816 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11817 enum built_in_function fcode
)
11819 tree dest
, src
, len
, size
;
11821 if (!validate_arglist (exp
,
11823 fcode
== BUILT_IN_MEMSET_CHK
11824 ? INTEGER_TYPE
: POINTER_TYPE
,
11825 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11828 dest
= CALL_EXPR_ARG (exp
, 0);
11829 src
= CALL_EXPR_ARG (exp
, 1);
11830 len
= CALL_EXPR_ARG (exp
, 2);
11831 size
= CALL_EXPR_ARG (exp
, 3);
11833 if (! host_integerp (size
, 1))
11836 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11840 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11842 warning_at (tree_nonartificial_location (exp
),
11843 0, "%Kcall to %D will always overflow destination buffer",
11844 exp
, get_callee_fndecl (exp
));
11849 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11850 mem{cpy,pcpy,move,set} is available. */
11853 case BUILT_IN_MEMCPY_CHK
:
11854 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11856 case BUILT_IN_MEMPCPY_CHK
:
11857 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11859 case BUILT_IN_MEMMOVE_CHK
:
11860 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11862 case BUILT_IN_MEMSET_CHK
:
11863 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11872 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
11873 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11874 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11875 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11877 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11881 unsigned int dest_align
11882 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11884 /* If DEST is not a pointer type, call the normal function. */
11885 if (dest_align
== 0)
11888 /* If SRC and DEST are the same (and not volatile), do nothing. */
11889 if (operand_equal_p (src
, dest
, 0))
11893 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11895 /* Evaluate and ignore LEN in case it has side-effects. */
11896 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11897 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11900 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11901 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11904 /* __memmove_chk special case. */
11905 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11907 unsigned int src_align
11908 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11910 if (src_align
== 0)
11913 /* If src is categorized for a readonly section we can use
11914 normal __memcpy_chk. */
11915 if (readonly_data_expr (src
))
11917 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11920 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
11921 dest
, src
, len
, size
);
11922 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11923 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11924 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11931 /* Emit warning if a buffer overflow is detected at compile time. */
11934 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11938 location_t loc
= tree_nonartificial_location (exp
);
11942 case BUILT_IN_STRCPY_CHK
:
11943 case BUILT_IN_STPCPY_CHK
:
11944 /* For __strcat_chk the warning will be emitted only if overflowing
11945 by at least strlen (dest) + 1 bytes. */
11946 case BUILT_IN_STRCAT_CHK
:
11947 len
= CALL_EXPR_ARG (exp
, 1);
11948 size
= CALL_EXPR_ARG (exp
, 2);
11951 case BUILT_IN_STRNCAT_CHK
:
11952 case BUILT_IN_STRNCPY_CHK
:
11953 len
= CALL_EXPR_ARG (exp
, 2);
11954 size
= CALL_EXPR_ARG (exp
, 3);
11956 case BUILT_IN_SNPRINTF_CHK
:
11957 case BUILT_IN_VSNPRINTF_CHK
:
11958 len
= CALL_EXPR_ARG (exp
, 1);
11959 size
= CALL_EXPR_ARG (exp
, 3);
11962 gcc_unreachable ();
11968 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11973 len
= c_strlen (len
, 1);
11974 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11977 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11979 tree src
= CALL_EXPR_ARG (exp
, 1);
11980 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11982 src
= c_strlen (src
, 1);
11983 if (! src
|| ! host_integerp (src
, 1))
11985 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
11986 exp
, get_callee_fndecl (exp
));
11989 else if (tree_int_cst_lt (src
, size
))
11992 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11995 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
11996 exp
, get_callee_fndecl (exp
));
11999 /* Emit warning if a buffer overflow is detected at compile time
12000 in __sprintf_chk/__vsprintf_chk calls. */
12003 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12005 tree size
, len
, fmt
;
12006 const char *fmt_str
;
12007 int nargs
= call_expr_nargs (exp
);
12009 /* Verify the required arguments in the original call. */
12013 size
= CALL_EXPR_ARG (exp
, 2);
12014 fmt
= CALL_EXPR_ARG (exp
, 3);
12016 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12019 /* Check whether the format is a literal string constant. */
12020 fmt_str
= c_getstr (fmt
);
12021 if (fmt_str
== NULL
)
12024 if (!init_target_chars ())
12027 /* If the format doesn't contain % args or %%, we know its size. */
12028 if (strchr (fmt_str
, target_percent
) == 0)
12029 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12030 /* If the format is "%s" and first ... argument is a string literal,
12032 else if (fcode
== BUILT_IN_SPRINTF_CHK
12033 && strcmp (fmt_str
, target_percent_s
) == 0)
12039 arg
= CALL_EXPR_ARG (exp
, 4);
12040 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12043 len
= c_strlen (arg
, 1);
12044 if (!len
|| ! host_integerp (len
, 1))
12050 if (! tree_int_cst_lt (len
, size
))
12051 warning_at (tree_nonartificial_location (exp
),
12052 0, "%Kcall to %D will always overflow destination buffer",
12053 exp
, get_callee_fndecl (exp
));
12056 /* Emit warning if a free is called with address of a variable. */
12059 maybe_emit_free_warning (tree exp
)
12061 tree arg
= CALL_EXPR_ARG (exp
, 0);
12064 if (TREE_CODE (arg
) != ADDR_EXPR
)
12067 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12068 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12071 if (SSA_VAR_P (arg
))
12072 warning_at (tree_nonartificial_location (exp
),
12073 0, "%Kattempt to free a non-heap object %qD", exp
, arg
);
12075 warning_at (tree_nonartificial_location (exp
),
12076 0, "%Kattempt to free a non-heap object", exp
);
12079 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12083 fold_builtin_object_size (tree ptr
, tree ost
)
12085 unsigned HOST_WIDE_INT bytes
;
12086 int object_size_type
;
12088 if (!validate_arg (ptr
, POINTER_TYPE
)
12089 || !validate_arg (ost
, INTEGER_TYPE
))
12094 if (TREE_CODE (ost
) != INTEGER_CST
12095 || tree_int_cst_sgn (ost
) < 0
12096 || compare_tree_int (ost
, 3) > 0)
12099 object_size_type
= tree_low_cst (ost
, 0);
12101 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12102 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12103 and (size_t) 0 for types 2 and 3. */
12104 if (TREE_SIDE_EFFECTS (ptr
))
12105 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12107 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12109 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12110 if (double_int_fits_to_tree_p (size_type_node
,
12111 uhwi_to_double_int (bytes
)))
12112 return build_int_cstu (size_type_node
, bytes
);
12114 else if (TREE_CODE (ptr
) == SSA_NAME
)
12116 /* If object size is not known yet, delay folding until
12117 later. Maybe subsequent passes will help determining
12119 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12120 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12121 && double_int_fits_to_tree_p (size_type_node
,
12122 uhwi_to_double_int (bytes
)))
12123 return build_int_cstu (size_type_node
, bytes
);
12129 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12130 DEST, SRC, LEN, and SIZE are the arguments to the call.
12131 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12132 code of the builtin. If MAXLEN is not NULL, it is maximum length
12133 passed as third argument. */
12136 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12137 tree dest
, tree src
, tree len
, tree size
,
12138 tree maxlen
, bool ignore
,
12139 enum built_in_function fcode
)
12143 if (!validate_arg (dest
, POINTER_TYPE
)
12144 || !validate_arg (src
,
12145 (fcode
== BUILT_IN_MEMSET_CHK
12146 ? INTEGER_TYPE
: POINTER_TYPE
))
12147 || !validate_arg (len
, INTEGER_TYPE
)
12148 || !validate_arg (size
, INTEGER_TYPE
))
12151 /* If SRC and DEST are the same (and not volatile), return DEST
12152 (resp. DEST+LEN for __mempcpy_chk). */
12153 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12155 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12156 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12160 tree temp
= fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (dest
),
12162 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12166 if (! host_integerp (size
, 1))
12169 if (! integer_all_onesp (size
))
12171 if (! host_integerp (len
, 1))
12173 /* If LEN is not constant, try MAXLEN too.
12174 For MAXLEN only allow optimizing into non-_ocs function
12175 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12176 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12178 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12180 /* (void) __mempcpy_chk () can be optimized into
12181 (void) __memcpy_chk (). */
12182 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12186 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12194 if (tree_int_cst_lt (size
, maxlen
))
12199 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12200 mem{cpy,pcpy,move,set} is available. */
12203 case BUILT_IN_MEMCPY_CHK
:
12204 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
12206 case BUILT_IN_MEMPCPY_CHK
:
12207 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
12209 case BUILT_IN_MEMMOVE_CHK
:
12210 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
12212 case BUILT_IN_MEMSET_CHK
:
12213 fn
= built_in_decls
[BUILT_IN_MEMSET
];
12222 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12225 /* Fold a call to the __st[rp]cpy_chk builtin.
12226 DEST, SRC, and SIZE are the arguments to the call.
12227 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12228 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12229 strings passed as second argument. */
12232 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12233 tree src
, tree size
,
12234 tree maxlen
, bool ignore
,
12235 enum built_in_function fcode
)
12239 if (!validate_arg (dest
, POINTER_TYPE
)
12240 || !validate_arg (src
, POINTER_TYPE
)
12241 || !validate_arg (size
, INTEGER_TYPE
))
12244 /* If SRC and DEST are the same (and not volatile), return DEST. */
12245 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12246 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12248 if (! host_integerp (size
, 1))
12251 if (! integer_all_onesp (size
))
12253 len
= c_strlen (src
, 1);
12254 if (! len
|| ! host_integerp (len
, 1))
12256 /* If LEN is not constant, try MAXLEN too.
12257 For MAXLEN only allow optimizing into non-_ocs function
12258 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12259 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12261 if (fcode
== BUILT_IN_STPCPY_CHK
)
12266 /* If return value of __stpcpy_chk is ignored,
12267 optimize into __strcpy_chk. */
12268 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
12272 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12275 if (! len
|| TREE_SIDE_EFFECTS (len
))
12278 /* If c_strlen returned something, but not a constant,
12279 transform __strcpy_chk into __memcpy_chk. */
12280 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12284 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
12285 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12286 build_call_expr_loc (loc
, fn
, 4,
12287 dest
, src
, len
, size
));
12293 if (! tree_int_cst_lt (maxlen
, size
))
12297 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12298 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
12299 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
12303 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12306 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12307 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12308 length passed as third argument. */
12311 fold_builtin_strncpy_chk (location_t loc
, tree dest
, tree src
,
12312 tree len
, tree size
, tree maxlen
)
12316 if (!validate_arg (dest
, POINTER_TYPE
)
12317 || !validate_arg (src
, POINTER_TYPE
)
12318 || !validate_arg (len
, INTEGER_TYPE
)
12319 || !validate_arg (size
, INTEGER_TYPE
))
12322 if (! host_integerp (size
, 1))
12325 if (! integer_all_onesp (size
))
12327 if (! host_integerp (len
, 1))
12329 /* If LEN is not constant, try MAXLEN too.
12330 For MAXLEN only allow optimizing into non-_ocs function
12331 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12332 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12338 if (tree_int_cst_lt (size
, maxlen
))
12342 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12343 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
12347 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12350 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12351 are the arguments to the call. */
12354 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12355 tree src
, tree size
)
12360 if (!validate_arg (dest
, POINTER_TYPE
)
12361 || !validate_arg (src
, POINTER_TYPE
)
12362 || !validate_arg (size
, INTEGER_TYPE
))
12365 p
= c_getstr (src
);
12366 /* If the SRC parameter is "", return DEST. */
12367 if (p
&& *p
== '\0')
12368 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12370 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12373 /* If __builtin_strcat_chk is used, assume strcat is available. */
12374 fn
= built_in_decls
[BUILT_IN_STRCAT
];
12378 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12381 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12385 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12386 tree dest
, tree src
, tree len
, tree size
)
12391 if (!validate_arg (dest
, POINTER_TYPE
)
12392 || !validate_arg (src
, POINTER_TYPE
)
12393 || !validate_arg (size
, INTEGER_TYPE
)
12394 || !validate_arg (size
, INTEGER_TYPE
))
12397 p
= c_getstr (src
);
12398 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12399 if (p
&& *p
== '\0')
12400 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12401 else if (integer_zerop (len
))
12402 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12404 if (! host_integerp (size
, 1))
12407 if (! integer_all_onesp (size
))
12409 tree src_len
= c_strlen (src
, 1);
12411 && host_integerp (src_len
, 1)
12412 && host_integerp (len
, 1)
12413 && ! tree_int_cst_lt (len
, src_len
))
12415 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12416 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12420 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12425 /* If __builtin_strncat_chk is used, assume strncat is available. */
12426 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12430 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12433 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12434 a normal call should be emitted rather than expanding the function
12435 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12438 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
12439 enum built_in_function fcode
)
12441 tree dest
, size
, len
, fn
, fmt
, flag
;
12442 const char *fmt_str
;
12443 int nargs
= call_expr_nargs (exp
);
12445 /* Verify the required arguments in the original call. */
12448 dest
= CALL_EXPR_ARG (exp
, 0);
12449 if (!validate_arg (dest
, POINTER_TYPE
))
12451 flag
= CALL_EXPR_ARG (exp
, 1);
12452 if (!validate_arg (flag
, INTEGER_TYPE
))
12454 size
= CALL_EXPR_ARG (exp
, 2);
12455 if (!validate_arg (size
, INTEGER_TYPE
))
12457 fmt
= CALL_EXPR_ARG (exp
, 3);
12458 if (!validate_arg (fmt
, POINTER_TYPE
))
12461 if (! host_integerp (size
, 1))
12466 if (!init_target_chars ())
12469 /* Check whether the format is a literal string constant. */
12470 fmt_str
= c_getstr (fmt
);
12471 if (fmt_str
!= NULL
)
12473 /* If the format doesn't contain % args or %%, we know the size. */
12474 if (strchr (fmt_str
, target_percent
) == 0)
12476 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12477 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12479 /* If the format is "%s" and first ... argument is a string literal,
12480 we know the size too. */
12481 else if (fcode
== BUILT_IN_SPRINTF_CHK
12482 && strcmp (fmt_str
, target_percent_s
) == 0)
12488 arg
= CALL_EXPR_ARG (exp
, 4);
12489 if (validate_arg (arg
, POINTER_TYPE
))
12491 len
= c_strlen (arg
, 1);
12492 if (! len
|| ! host_integerp (len
, 1))
12499 if (! integer_all_onesp (size
))
12501 if (! len
|| ! tree_int_cst_lt (len
, size
))
12505 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12506 or if format doesn't contain % chars or is "%s". */
12507 if (! integer_zerop (flag
))
12509 if (fmt_str
== NULL
)
12511 if (strchr (fmt_str
, target_percent
) != NULL
12512 && strcmp (fmt_str
, target_percent_s
))
12516 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12517 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12518 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12522 return rewrite_call_expr (loc
, exp
, 4, fn
, 2, dest
, fmt
);
12525 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12526 a normal call should be emitted rather than expanding the function
12527 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12528 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12529 passed as second argument. */
12532 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
12533 enum built_in_function fcode
)
12535 tree dest
, size
, len
, fn
, fmt
, flag
;
12536 const char *fmt_str
;
12538 /* Verify the required arguments in the original call. */
12539 if (call_expr_nargs (exp
) < 5)
12541 dest
= CALL_EXPR_ARG (exp
, 0);
12542 if (!validate_arg (dest
, POINTER_TYPE
))
12544 len
= CALL_EXPR_ARG (exp
, 1);
12545 if (!validate_arg (len
, INTEGER_TYPE
))
12547 flag
= CALL_EXPR_ARG (exp
, 2);
12548 if (!validate_arg (flag
, INTEGER_TYPE
))
12550 size
= CALL_EXPR_ARG (exp
, 3);
12551 if (!validate_arg (size
, INTEGER_TYPE
))
12553 fmt
= CALL_EXPR_ARG (exp
, 4);
12554 if (!validate_arg (fmt
, POINTER_TYPE
))
12557 if (! host_integerp (size
, 1))
12560 if (! integer_all_onesp (size
))
12562 if (! host_integerp (len
, 1))
12564 /* If LEN is not constant, try MAXLEN too.
12565 For MAXLEN only allow optimizing into non-_ocs function
12566 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12567 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12573 if (tree_int_cst_lt (size
, maxlen
))
12577 if (!init_target_chars ())
12580 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12581 or if format doesn't contain % chars or is "%s". */
12582 if (! integer_zerop (flag
))
12584 fmt_str
= c_getstr (fmt
);
12585 if (fmt_str
== NULL
)
12587 if (strchr (fmt_str
, target_percent
) != NULL
12588 && strcmp (fmt_str
, target_percent_s
))
12592 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12594 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12595 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12599 return rewrite_call_expr (loc
, exp
, 5, fn
, 3, dest
, len
, fmt
);
12602 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12603 FMT and ARG are the arguments to the call; we don't fold cases with
12604 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12606 Return NULL_TREE if no simplification was possible, otherwise return the
12607 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12608 code of the function to be simplified. */
12611 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
12612 tree arg
, bool ignore
,
12613 enum built_in_function fcode
)
12615 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12616 const char *fmt_str
= NULL
;
12618 /* If the return value is used, don't do the transformation. */
12622 /* Verify the required arguments in the original call. */
12623 if (!validate_arg (fmt
, POINTER_TYPE
))
12626 /* Check whether the format is a literal string constant. */
12627 fmt_str
= c_getstr (fmt
);
12628 if (fmt_str
== NULL
)
12631 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12633 /* If we're using an unlocked function, assume the other
12634 unlocked functions exist explicitly. */
12635 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12636 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12640 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12641 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12644 if (!init_target_chars ())
12647 if (strcmp (fmt_str
, target_percent_s
) == 0
12648 || strchr (fmt_str
, target_percent
) == NULL
)
12652 if (strcmp (fmt_str
, target_percent_s
) == 0)
12654 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12657 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12660 str
= c_getstr (arg
);
12666 /* The format specifier doesn't contain any '%' characters. */
12667 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12673 /* If the string was "", printf does nothing. */
12674 if (str
[0] == '\0')
12675 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12677 /* If the string has length of 1, call putchar. */
12678 if (str
[1] == '\0')
12680 /* Given printf("c"), (where c is any one character,)
12681 convert "c"[0] to an int and pass that to the replacement
12683 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12685 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
12689 /* If the string was "string\n", call puts("string"). */
12690 size_t len
= strlen (str
);
12691 if ((unsigned char)str
[len
- 1] == target_newline
)
12693 /* Create a NUL-terminated string that's one char shorter
12694 than the original, stripping off the trailing '\n'. */
12695 char *newstr
= XALLOCAVEC (char, len
);
12696 memcpy (newstr
, str
, len
- 1);
12697 newstr
[len
- 1] = 0;
12699 newarg
= build_string_literal (len
, newstr
);
12701 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
12704 /* We'd like to arrange to call fputs(string,stdout) here,
12705 but we need stdout and don't have a way to get it yet. */
12710 /* The other optimizations can be done only on the non-va_list variants. */
12711 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12714 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12715 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12717 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12720 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
12723 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12724 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12726 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12729 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
12735 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12738 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12739 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12740 more than 3 arguments, and ARG may be null in the 2-argument case.
12742 Return NULL_TREE if no simplification was possible, otherwise return the
12743 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12744 code of the function to be simplified. */
12747 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
12748 tree fmt
, tree arg
, bool ignore
,
12749 enum built_in_function fcode
)
12751 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12752 const char *fmt_str
= NULL
;
12754 /* If the return value is used, don't do the transformation. */
12758 /* Verify the required arguments in the original call. */
12759 if (!validate_arg (fp
, POINTER_TYPE
))
12761 if (!validate_arg (fmt
, POINTER_TYPE
))
12764 /* Check whether the format is a literal string constant. */
12765 fmt_str
= c_getstr (fmt
);
12766 if (fmt_str
== NULL
)
12769 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12771 /* If we're using an unlocked function, assume the other
12772 unlocked functions exist explicitly. */
12773 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12774 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12778 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12779 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12782 if (!init_target_chars ())
12785 /* If the format doesn't contain % args or %%, use strcpy. */
12786 if (strchr (fmt_str
, target_percent
) == NULL
)
12788 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12792 /* If the format specifier was "", fprintf does nothing. */
12793 if (fmt_str
[0] == '\0')
12795 /* If FP has side-effects, just wait until gimplification is
12797 if (TREE_SIDE_EFFECTS (fp
))
12800 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12803 /* When "string" doesn't contain %, replace all cases of
12804 fprintf (fp, string) with fputs (string, fp). The fputs
12805 builtin will take care of special cases like length == 1. */
12807 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
12810 /* The other optimizations can be done only on the non-va_list variants. */
12811 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12814 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12815 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12817 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12820 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
12823 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12824 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12826 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12829 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
12834 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12837 /* Initialize format string characters in the target charset. */
12840 init_target_chars (void)
12845 target_newline
= lang_hooks
.to_target_charset ('\n');
12846 target_percent
= lang_hooks
.to_target_charset ('%');
12847 target_c
= lang_hooks
.to_target_charset ('c');
12848 target_s
= lang_hooks
.to_target_charset ('s');
12849 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12853 target_percent_c
[0] = target_percent
;
12854 target_percent_c
[1] = target_c
;
12855 target_percent_c
[2] = '\0';
12857 target_percent_s
[0] = target_percent
;
12858 target_percent_s
[1] = target_s
;
12859 target_percent_s
[2] = '\0';
12861 target_percent_s_newline
[0] = target_percent
;
12862 target_percent_s_newline
[1] = target_s
;
12863 target_percent_s_newline
[2] = target_newline
;
12864 target_percent_s_newline
[3] = '\0';
12871 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12872 and no overflow/underflow occurred. INEXACT is true if M was not
12873 exactly calculated. TYPE is the tree type for the result. This
12874 function assumes that you cleared the MPFR flags and then
12875 calculated M to see if anything subsequently set a flag prior to
12876 entering this function. Return NULL_TREE if any checks fail. */
12879 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12881 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12882 overflow/underflow occurred. If -frounding-math, proceed iff the
12883 result of calling FUNC was exact. */
12884 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12885 && (!flag_rounding_math
|| !inexact
))
12887 REAL_VALUE_TYPE rr
;
12889 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12890 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12891 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12892 but the mpft_t is not, then we underflowed in the
12894 if (real_isfinite (&rr
)
12895 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12897 REAL_VALUE_TYPE rmode
;
12899 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12900 /* Proceed iff the specified mode can hold the value. */
12901 if (real_identical (&rmode
, &rr
))
12902 return build_real (type
, rmode
);
12908 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12909 number and no overflow/underflow occurred. INEXACT is true if M
12910 was not exactly calculated. TYPE is the tree type for the result.
12911 This function assumes that you cleared the MPFR flags and then
12912 calculated M to see if anything subsequently set a flag prior to
12913 entering this function. Return NULL_TREE if any checks fail, if
12914 FORCE_CONVERT is true, then bypass the checks. */
12917 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
12919 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12920 overflow/underflow occurred. If -frounding-math, proceed iff the
12921 result of calling FUNC was exact. */
12923 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
12924 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12925 && (!flag_rounding_math
|| !inexact
)))
12927 REAL_VALUE_TYPE re
, im
;
12929 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
12930 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
12931 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12932 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12933 but the mpft_t is not, then we underflowed in the
12936 || (real_isfinite (&re
) && real_isfinite (&im
)
12937 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
12938 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
12940 REAL_VALUE_TYPE re_mode
, im_mode
;
12942 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
12943 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
12944 /* Proceed iff the specified mode can hold the value. */
12946 || (real_identical (&re_mode
, &re
)
12947 && real_identical (&im_mode
, &im
)))
12948 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
12949 build_real (TREE_TYPE (type
), im_mode
));
12955 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12956 FUNC on it and return the resulting value as a tree with type TYPE.
12957 If MIN and/or MAX are not NULL, then the supplied ARG must be
12958 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12959 acceptable values, otherwise they are not. The mpfr precision is
12960 set to the precision of TYPE. We assume that function FUNC returns
12961 zero if the result could be calculated exactly within the requested
12965 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12966 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12969 tree result
= NULL_TREE
;
12973 /* To proceed, MPFR must exactly represent the target floating point
12974 format, which only happens when the target base equals two. */
12975 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12976 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12978 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12980 if (real_isfinite (ra
)
12981 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12982 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12984 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
12985 const int prec
= fmt
->p
;
12986 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
12990 mpfr_init2 (m
, prec
);
12991 mpfr_from_real (m
, ra
, GMP_RNDN
);
12992 mpfr_clear_flags ();
12993 inexact
= func (m
, m
, rnd
);
12994 result
= do_mpfr_ckconv (m
, type
, inexact
);
13002 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13003 FUNC on it and return the resulting value as a tree with type TYPE.
13004 The mpfr precision is set to the precision of TYPE. We assume that
13005 function FUNC returns zero if the result could be calculated
13006 exactly within the requested precision. */
13009 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13010 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13012 tree result
= NULL_TREE
;
13017 /* To proceed, MPFR must exactly represent the target floating point
13018 format, which only happens when the target base equals two. */
13019 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13020 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13021 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13023 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13024 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13026 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13028 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13029 const int prec
= fmt
->p
;
13030 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13034 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13035 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13036 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13037 mpfr_clear_flags ();
13038 inexact
= func (m1
, m1
, m2
, rnd
);
13039 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13040 mpfr_clears (m1
, m2
, NULL
);
13047 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13048 FUNC on it and return the resulting value as a tree with type TYPE.
13049 The mpfr precision is set to the precision of TYPE. We assume that
13050 function FUNC returns zero if the result could be calculated
13051 exactly within the requested precision. */
13054 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13055 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13057 tree result
= NULL_TREE
;
13063 /* To proceed, MPFR must exactly represent the target floating point
13064 format, which only happens when the target base equals two. */
13065 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13066 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13067 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13068 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13070 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13071 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13072 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13074 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13076 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13077 const int prec
= fmt
->p
;
13078 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13082 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13083 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13084 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13085 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13086 mpfr_clear_flags ();
13087 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13088 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13089 mpfr_clears (m1
, m2
, m3
, NULL
);
13096 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13097 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13098 If ARG_SINP and ARG_COSP are NULL then the result is returned
13099 as a complex value.
13100 The type is taken from the type of ARG and is used for setting the
13101 precision of the calculation and results. */
13104 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13106 tree
const type
= TREE_TYPE (arg
);
13107 tree result
= NULL_TREE
;
13111 /* To proceed, MPFR must exactly represent the target floating point
13112 format, which only happens when the target base equals two. */
13113 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13114 && TREE_CODE (arg
) == REAL_CST
13115 && !TREE_OVERFLOW (arg
))
13117 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13119 if (real_isfinite (ra
))
13121 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13122 const int prec
= fmt
->p
;
13123 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13124 tree result_s
, result_c
;
13128 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13129 mpfr_from_real (m
, ra
, GMP_RNDN
);
13130 mpfr_clear_flags ();
13131 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13132 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13133 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13134 mpfr_clears (m
, ms
, mc
, NULL
);
13135 if (result_s
&& result_c
)
13137 /* If we are to return in a complex value do so. */
13138 if (!arg_sinp
&& !arg_cosp
)
13139 return build_complex (build_complex_type (type
),
13140 result_c
, result_s
);
13142 /* Dereference the sin/cos pointer arguments. */
13143 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13144 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13145 /* Proceed if valid pointer type were passed in. */
13146 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13147 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13149 /* Set the values. */
13150 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13152 TREE_SIDE_EFFECTS (result_s
) = 1;
13153 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13155 TREE_SIDE_EFFECTS (result_c
) = 1;
13156 /* Combine the assignments into a compound expr. */
13157 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13158 result_s
, result_c
));
13166 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13167 two-argument mpfr order N Bessel function FUNC on them and return
13168 the resulting value as a tree with type TYPE. The mpfr precision
13169 is set to the precision of TYPE. We assume that function FUNC
13170 returns zero if the result could be calculated exactly within the
13171 requested precision. */
13173 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13174 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13175 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13177 tree result
= NULL_TREE
;
13182 /* To proceed, MPFR must exactly represent the target floating point
13183 format, which only happens when the target base equals two. */
13184 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13185 && host_integerp (arg1
, 0)
13186 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13188 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13189 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13192 && real_isfinite (ra
)
13193 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13195 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13196 const int prec
= fmt
->p
;
13197 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13201 mpfr_init2 (m
, prec
);
13202 mpfr_from_real (m
, ra
, GMP_RNDN
);
13203 mpfr_clear_flags ();
13204 inexact
= func (m
, n
, m
, rnd
);
13205 result
= do_mpfr_ckconv (m
, type
, inexact
);
13213 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13214 the pointer *(ARG_QUO) and return the result. The type is taken
13215 from the type of ARG0 and is used for setting the precision of the
13216 calculation and results. */
13219 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13221 tree
const type
= TREE_TYPE (arg0
);
13222 tree result
= NULL_TREE
;
13227 /* To proceed, MPFR must exactly represent the target floating point
13228 format, which only happens when the target base equals two. */
13229 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13230 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13231 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13233 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13234 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13236 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13238 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13239 const int prec
= fmt
->p
;
13240 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13245 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13246 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13247 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13248 mpfr_clear_flags ();
13249 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13250 /* Remquo is independent of the rounding mode, so pass
13251 inexact=0 to do_mpfr_ckconv(). */
13252 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13253 mpfr_clears (m0
, m1
, NULL
);
13256 /* MPFR calculates quo in the host's long so it may
13257 return more bits in quo than the target int can hold
13258 if sizeof(host long) > sizeof(target int). This can
13259 happen even for native compilers in LP64 mode. In
13260 these cases, modulo the quo value with the largest
13261 number that the target int can hold while leaving one
13262 bit for the sign. */
13263 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13264 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13266 /* Dereference the quo pointer argument. */
13267 arg_quo
= build_fold_indirect_ref (arg_quo
);
13268 /* Proceed iff a valid pointer type was passed in. */
13269 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13271 /* Set the value. */
13272 tree result_quo
= fold_build2 (MODIFY_EXPR
,
13273 TREE_TYPE (arg_quo
), arg_quo
,
13274 build_int_cst (NULL
, integer_quo
));
13275 TREE_SIDE_EFFECTS (result_quo
) = 1;
13276 /* Combine the quo assignment with the rem. */
13277 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13278 result_quo
, result_rem
));
13286 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13287 resulting value as a tree with type TYPE. The mpfr precision is
13288 set to the precision of TYPE. We assume that this mpfr function
13289 returns zero if the result could be calculated exactly within the
13290 requested precision. In addition, the integer pointer represented
13291 by ARG_SG will be dereferenced and set to the appropriate signgam
13295 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13297 tree result
= NULL_TREE
;
13301 /* To proceed, MPFR must exactly represent the target floating point
13302 format, which only happens when the target base equals two. Also
13303 verify ARG is a constant and that ARG_SG is an int pointer. */
13304 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13305 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13306 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13307 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13309 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13311 /* In addition to NaN and Inf, the argument cannot be zero or a
13312 negative integer. */
13313 if (real_isfinite (ra
)
13314 && ra
->cl
!= rvc_zero
13315 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
13317 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13318 const int prec
= fmt
->p
;
13319 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13324 mpfr_init2 (m
, prec
);
13325 mpfr_from_real (m
, ra
, GMP_RNDN
);
13326 mpfr_clear_flags ();
13327 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13328 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13334 /* Dereference the arg_sg pointer argument. */
13335 arg_sg
= build_fold_indirect_ref (arg_sg
);
13336 /* Assign the signgam value into *arg_sg. */
13337 result_sg
= fold_build2 (MODIFY_EXPR
,
13338 TREE_TYPE (arg_sg
), arg_sg
,
13339 build_int_cst (NULL
, sg
));
13340 TREE_SIDE_EFFECTS (result_sg
) = 1;
13341 /* Combine the signgam assignment with the lgamma result. */
13342 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13343 result_sg
, result_lg
));
13351 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13352 function FUNC on it and return the resulting value as a tree with
13353 type TYPE. The mpfr precision is set to the precision of TYPE. We
13354 assume that function FUNC returns zero if the result could be
13355 calculated exactly within the requested precision. */
13358 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
13360 tree result
= NULL_TREE
;
13364 /* To proceed, MPFR must exactly represent the target floating point
13365 format, which only happens when the target base equals two. */
13366 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
13367 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
13368 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
13370 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
13371 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
13373 if (real_isfinite (re
) && real_isfinite (im
))
13375 const struct real_format
*const fmt
=
13376 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13377 const int prec
= fmt
->p
;
13378 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13379 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13383 mpc_init2 (m
, prec
);
13384 mpfr_from_real (mpc_realref(m
), re
, rnd
);
13385 mpfr_from_real (mpc_imagref(m
), im
, rnd
);
13386 mpfr_clear_flags ();
13387 inexact
= func (m
, m
, crnd
);
13388 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
13396 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13397 mpc function FUNC on it and return the resulting value as a tree
13398 with type TYPE. The mpfr precision is set to the precision of
13399 TYPE. We assume that function FUNC returns zero if the result
13400 could be calculated exactly within the requested precision. If
13401 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13402 in the arguments and/or results. */
13405 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
13406 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
13408 tree result
= NULL_TREE
;
13413 /* To proceed, MPFR must exactly represent the target floating point
13414 format, which only happens when the target base equals two. */
13415 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
13416 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
13417 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
13418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
13419 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
13421 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
13422 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
13423 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
13424 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
13427 || (real_isfinite (re0
) && real_isfinite (im0
)
13428 && real_isfinite (re1
) && real_isfinite (im1
)))
13430 const struct real_format
*const fmt
=
13431 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13432 const int prec
= fmt
->p
;
13433 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13434 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13438 mpc_init2 (m0
, prec
);
13439 mpc_init2 (m1
, prec
);
13440 mpfr_from_real (mpc_realref(m0
), re0
, rnd
);
13441 mpfr_from_real (mpc_imagref(m0
), im0
, rnd
);
13442 mpfr_from_real (mpc_realref(m1
), re1
, rnd
);
13443 mpfr_from_real (mpc_imagref(m1
), im1
, rnd
);
13444 mpfr_clear_flags ();
13445 inexact
= func (m0
, m0
, m1
, crnd
);
13446 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
13456 The functions below provide an alternate interface for folding
13457 builtin function calls presented as GIMPLE_CALL statements rather
13458 than as CALL_EXPRs. The folded result is still expressed as a
13459 tree. There is too much code duplication in the handling of
13460 varargs functions, and a more intrusive re-factoring would permit
13461 better sharing of code between the tree and statement-based
13462 versions of these functions. */
13464 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13465 along with N new arguments specified as the "..." parameters. SKIP
13466 is the number of arguments in STMT to be omitted. This function is used
13467 to do varargs-to-varargs transformations. */
13470 gimple_rewrite_call_expr (gimple stmt
, int skip
, tree fndecl
, int n
, ...)
13472 int oldnargs
= gimple_call_num_args (stmt
);
13473 int nargs
= oldnargs
- skip
+ n
;
13474 tree fntype
= TREE_TYPE (fndecl
);
13475 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
13479 location_t loc
= gimple_location (stmt
);
13481 buffer
= XALLOCAVEC (tree
, nargs
);
13483 for (i
= 0; i
< n
; i
++)
13484 buffer
[i
] = va_arg (ap
, tree
);
13486 for (j
= skip
; j
< oldnargs
; j
++, i
++)
13487 buffer
[i
] = gimple_call_arg (stmt
, j
);
13489 return fold (build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, nargs
, buffer
));
13492 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13493 a normal call should be emitted rather than expanding the function
13494 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13497 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
13499 tree dest
, size
, len
, fn
, fmt
, flag
;
13500 const char *fmt_str
;
13501 int nargs
= gimple_call_num_args (stmt
);
13503 /* Verify the required arguments in the original call. */
13506 dest
= gimple_call_arg (stmt
, 0);
13507 if (!validate_arg (dest
, POINTER_TYPE
))
13509 flag
= gimple_call_arg (stmt
, 1);
13510 if (!validate_arg (flag
, INTEGER_TYPE
))
13512 size
= gimple_call_arg (stmt
, 2);
13513 if (!validate_arg (size
, INTEGER_TYPE
))
13515 fmt
= gimple_call_arg (stmt
, 3);
13516 if (!validate_arg (fmt
, POINTER_TYPE
))
13519 if (! host_integerp (size
, 1))
13524 if (!init_target_chars ())
13527 /* Check whether the format is a literal string constant. */
13528 fmt_str
= c_getstr (fmt
);
13529 if (fmt_str
!= NULL
)
13531 /* If the format doesn't contain % args or %%, we know the size. */
13532 if (strchr (fmt_str
, target_percent
) == 0)
13534 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13535 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13537 /* If the format is "%s" and first ... argument is a string literal,
13538 we know the size too. */
13539 else if (fcode
== BUILT_IN_SPRINTF_CHK
13540 && strcmp (fmt_str
, target_percent_s
) == 0)
13546 arg
= gimple_call_arg (stmt
, 4);
13547 if (validate_arg (arg
, POINTER_TYPE
))
13549 len
= c_strlen (arg
, 1);
13550 if (! len
|| ! host_integerp (len
, 1))
13557 if (! integer_all_onesp (size
))
13559 if (! len
|| ! tree_int_cst_lt (len
, size
))
13563 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13564 or if format doesn't contain % chars or is "%s". */
13565 if (! integer_zerop (flag
))
13567 if (fmt_str
== NULL
)
13569 if (strchr (fmt_str
, target_percent
) != NULL
13570 && strcmp (fmt_str
, target_percent_s
))
13574 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13575 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
13576 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
13580 return gimple_rewrite_call_expr (stmt
, 4, fn
, 2, dest
, fmt
);
13583 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13584 a normal call should be emitted rather than expanding the function
13585 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13586 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13587 passed as second argument. */
13590 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
13591 enum built_in_function fcode
)
13593 tree dest
, size
, len
, fn
, fmt
, flag
;
13594 const char *fmt_str
;
13596 /* Verify the required arguments in the original call. */
13597 if (gimple_call_num_args (stmt
) < 5)
13599 dest
= gimple_call_arg (stmt
, 0);
13600 if (!validate_arg (dest
, POINTER_TYPE
))
13602 len
= gimple_call_arg (stmt
, 1);
13603 if (!validate_arg (len
, INTEGER_TYPE
))
13605 flag
= gimple_call_arg (stmt
, 2);
13606 if (!validate_arg (flag
, INTEGER_TYPE
))
13608 size
= gimple_call_arg (stmt
, 3);
13609 if (!validate_arg (size
, INTEGER_TYPE
))
13611 fmt
= gimple_call_arg (stmt
, 4);
13612 if (!validate_arg (fmt
, POINTER_TYPE
))
13615 if (! host_integerp (size
, 1))
13618 if (! integer_all_onesp (size
))
13620 if (! host_integerp (len
, 1))
13622 /* If LEN is not constant, try MAXLEN too.
13623 For MAXLEN only allow optimizing into non-_ocs function
13624 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13625 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13631 if (tree_int_cst_lt (size
, maxlen
))
13635 if (!init_target_chars ())
13638 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13639 or if format doesn't contain % chars or is "%s". */
13640 if (! integer_zerop (flag
))
13642 fmt_str
= c_getstr (fmt
);
13643 if (fmt_str
== NULL
)
13645 if (strchr (fmt_str
, target_percent
) != NULL
13646 && strcmp (fmt_str
, target_percent_s
))
13650 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13652 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
13653 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
13657 return gimple_rewrite_call_expr (stmt
, 5, fn
, 3, dest
, len
, fmt
);
13660 /* Builtins with folding operations that operate on "..." arguments
13661 need special handling; we need to store the arguments in a convenient
13662 data structure before attempting any folding. Fortunately there are
13663 only a few builtins that fall into this category. FNDECL is the
13664 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13665 result of the function call is ignored. */
13668 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
13669 bool ignore ATTRIBUTE_UNUSED
)
13671 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
13672 tree ret
= NULL_TREE
;
13676 case BUILT_IN_SPRINTF_CHK
:
13677 case BUILT_IN_VSPRINTF_CHK
:
13678 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
13681 case BUILT_IN_SNPRINTF_CHK
:
13682 case BUILT_IN_VSNPRINTF_CHK
:
13683 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
13690 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
13691 TREE_NO_WARNING (ret
) = 1;
13697 /* A wrapper function for builtin folding that prevents warnings for
13698 "statement without effect" and the like, caused by removing the
13699 call node earlier than the warning is generated. */
13702 fold_call_stmt (gimple stmt
, bool ignore
)
13704 tree ret
= NULL_TREE
;
13705 tree fndecl
= gimple_call_fndecl (stmt
);
13706 location_t loc
= gimple_location (stmt
);
13708 && TREE_CODE (fndecl
) == FUNCTION_DECL
13709 && DECL_BUILT_IN (fndecl
)
13710 && !gimple_call_va_arg_pack_p (stmt
))
13712 int nargs
= gimple_call_num_args (stmt
);
13714 if (avoid_folding_inline_builtin (fndecl
))
13716 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
13718 return targetm
.fold_builtin (fndecl
, nargs
,
13720 ? gimple_call_arg_ptr (stmt
, 0)
13721 : &error_mark_node
), ignore
);
13725 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
13727 tree args
[MAX_ARGS_TO_FOLD_BUILTIN
];
13729 for (i
= 0; i
< nargs
; i
++)
13730 args
[i
] = gimple_call_arg (stmt
, i
);
13731 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
13734 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
13737 /* Propagate location information from original call to
13738 expansion of builtin. Otherwise things like
13739 maybe_emit_chk_warning, that operate on the expansion
13740 of a builtin, will use the wrong location information. */
13741 if (gimple_has_location (stmt
))
13743 tree realret
= ret
;
13744 if (TREE_CODE (ret
) == NOP_EXPR
)
13745 realret
= TREE_OPERAND (ret
, 0);
13746 if (CAN_HAVE_LOCATION_P (realret
)
13747 && !EXPR_HAS_LOCATION (realret
))
13748 SET_EXPR_LOCATION (realret
, loc
);
13758 /* Look up the function in built_in_decls that corresponds to DECL
13759 and set ASMSPEC as its user assembler name. DECL must be a
13760 function decl that declares a builtin. */
13763 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
13766 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
13767 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
13770 builtin
= built_in_decls
[DECL_FUNCTION_CODE (decl
)];
13771 set_user_assembler_name (builtin
, asmspec
);
13772 switch (DECL_FUNCTION_CODE (decl
))
13774 case BUILT_IN_MEMCPY
:
13775 init_block_move_fn (asmspec
);
13776 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
13778 case BUILT_IN_MEMSET
:
13779 init_block_clear_fn (asmspec
);
13780 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
13782 case BUILT_IN_MEMMOVE
:
13783 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
13785 case BUILT_IN_MEMCMP
:
13786 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
13788 case BUILT_IN_ABORT
:
13789 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
13792 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
13794 set_user_assembler_libfunc ("ffs", asmspec
);
13795 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
13796 MODE_INT
, 0), "ffs");
13804 /* Return true if DECL is a builtin that expands to a constant or similarly
13807 is_simple_builtin (tree decl
)
13809 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
13810 switch (DECL_FUNCTION_CODE (decl
))
13812 /* Builtins that expand to constants. */
13813 case BUILT_IN_CONSTANT_P
:
13814 case BUILT_IN_EXPECT
:
13815 case BUILT_IN_OBJECT_SIZE
:
13816 case BUILT_IN_UNREACHABLE
:
13817 /* Simple register moves or loads from stack. */
13818 case BUILT_IN_RETURN_ADDRESS
:
13819 case BUILT_IN_EXTRACT_RETURN_ADDR
:
13820 case BUILT_IN_FROB_RETURN_ADDR
:
13821 case BUILT_IN_RETURN
:
13822 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
13823 case BUILT_IN_FRAME_ADDRESS
:
13824 case BUILT_IN_VA_END
:
13825 case BUILT_IN_STACK_SAVE
:
13826 case BUILT_IN_STACK_RESTORE
:
13827 /* Exception state returns or moves registers around. */
13828 case BUILT_IN_EH_FILTER
:
13829 case BUILT_IN_EH_POINTER
:
13830 case BUILT_IN_EH_COPY_VALUES
:
13840 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13841 most probably expanded inline into reasonably simple code. This is a
13842 superset of is_simple_builtin. */
13844 is_inexpensive_builtin (tree decl
)
13848 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
13850 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
13851 switch (DECL_FUNCTION_CODE (decl
))
13854 case BUILT_IN_ALLOCA
:
13855 case BUILT_IN_BSWAP32
:
13856 case BUILT_IN_BSWAP64
:
13858 case BUILT_IN_CLZIMAX
:
13859 case BUILT_IN_CLZL
:
13860 case BUILT_IN_CLZLL
:
13862 case BUILT_IN_CTZIMAX
:
13863 case BUILT_IN_CTZL
:
13864 case BUILT_IN_CTZLL
:
13866 case BUILT_IN_FFSIMAX
:
13867 case BUILT_IN_FFSL
:
13868 case BUILT_IN_FFSLL
:
13869 case BUILT_IN_IMAXABS
:
13870 case BUILT_IN_FINITE
:
13871 case BUILT_IN_FINITEF
:
13872 case BUILT_IN_FINITEL
:
13873 case BUILT_IN_FINITED32
:
13874 case BUILT_IN_FINITED64
:
13875 case BUILT_IN_FINITED128
:
13876 case BUILT_IN_FPCLASSIFY
:
13877 case BUILT_IN_ISFINITE
:
13878 case BUILT_IN_ISINF_SIGN
:
13879 case BUILT_IN_ISINF
:
13880 case BUILT_IN_ISINFF
:
13881 case BUILT_IN_ISINFL
:
13882 case BUILT_IN_ISINFD32
:
13883 case BUILT_IN_ISINFD64
:
13884 case BUILT_IN_ISINFD128
:
13885 case BUILT_IN_ISNAN
:
13886 case BUILT_IN_ISNANF
:
13887 case BUILT_IN_ISNANL
:
13888 case BUILT_IN_ISNAND32
:
13889 case BUILT_IN_ISNAND64
:
13890 case BUILT_IN_ISNAND128
:
13891 case BUILT_IN_ISNORMAL
:
13892 case BUILT_IN_ISGREATER
:
13893 case BUILT_IN_ISGREATEREQUAL
:
13894 case BUILT_IN_ISLESS
:
13895 case BUILT_IN_ISLESSEQUAL
:
13896 case BUILT_IN_ISLESSGREATER
:
13897 case BUILT_IN_ISUNORDERED
:
13898 case BUILT_IN_VA_ARG_PACK
:
13899 case BUILT_IN_VA_ARG_PACK_LEN
:
13900 case BUILT_IN_VA_COPY
:
13901 case BUILT_IN_TRAP
:
13902 case BUILT_IN_SAVEREGS
:
13903 case BUILT_IN_POPCOUNTL
:
13904 case BUILT_IN_POPCOUNTLL
:
13905 case BUILT_IN_POPCOUNTIMAX
:
13906 case BUILT_IN_POPCOUNT
:
13907 case BUILT_IN_PARITYL
:
13908 case BUILT_IN_PARITYLL
:
13909 case BUILT_IN_PARITYIMAX
:
13910 case BUILT_IN_PARITY
:
13911 case BUILT_IN_LABS
:
13912 case BUILT_IN_LLABS
:
13913 case BUILT_IN_PREFETCH
:
13917 return is_simple_builtin (decl
);