1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
60 struct target_builtins default_target_builtins
;
62 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names
[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names
[(int) END_BUILTINS
] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info
;
80 static const char *c_getstr (tree
);
81 static rtx
c_readstr (const char *, enum machine_mode
);
82 static int target_char_cast (tree
, char *);
83 static rtx
get_memory_rtx (tree
, tree
);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx
result_vector (int, rtx
);
89 static void expand_builtin_update_setjmp_buf (rtx
);
90 static void expand_builtin_prefetch (tree
);
91 static rtx
expand_builtin_apply_args (void);
92 static rtx
expand_builtin_apply_args_1 (void);
93 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
94 static void expand_builtin_return (rtx
);
95 static enum type_class
type_to_class (tree
);
96 static rtx
expand_builtin_classify_type (tree
);
97 static void expand_errno_check (tree
, rtx
);
98 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
103 static rtx
expand_builtin_sincos (tree
);
104 static rtx
expand_builtin_cexpi (tree
, rtx
);
105 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
106 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
107 static rtx
expand_builtin_next_arg (void);
108 static rtx
expand_builtin_va_start (tree
);
109 static rtx
expand_builtin_va_end (tree
);
110 static rtx
expand_builtin_va_copy (tree
);
111 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
112 static rtx
expand_builtin_strcmp (tree
, rtx
);
113 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
114 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
115 static rtx
expand_builtin_memcpy (tree
, rtx
);
116 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
118 enum machine_mode
, int);
119 static rtx
expand_builtin_strcpy (tree
, rtx
);
120 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
121 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
122 static rtx
expand_builtin_strncpy (tree
, rtx
);
123 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
124 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
125 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
126 static rtx
expand_builtin_bzero (tree
);
127 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_alloca (tree
, bool);
129 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
130 static rtx
expand_builtin_frame_address (tree
, tree
);
131 static tree
stabilize_va_list_loc (location_t
, tree
, int);
132 static rtx
expand_builtin_expect (tree
, rtx
);
133 static tree
fold_builtin_constant_p (tree
);
134 static tree
fold_builtin_expect (location_t
, tree
, tree
);
135 static tree
fold_builtin_classify_type (tree
);
136 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
137 static tree
fold_builtin_inf (location_t
, tree
, int);
138 static tree
fold_builtin_nan (tree
, tree
, int);
139 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
140 static bool validate_arg (const_tree
, enum tree_code code
);
141 static bool integer_valued_real_p (tree
);
142 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
143 static bool readonly_data_expr (tree
);
144 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
145 static rtx
expand_builtin_signbit (tree
, rtx
);
146 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
147 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
148 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
149 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
150 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
151 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
152 static tree
fold_builtin_tan (tree
, tree
);
153 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
154 static tree
fold_builtin_floor (location_t
, tree
, tree
);
155 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
156 static tree
fold_builtin_round (location_t
, tree
, tree
);
157 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
158 static tree
fold_builtin_bitop (tree
, tree
);
159 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
160 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
162 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
163 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
164 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
165 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
166 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
167 static tree
fold_builtin_isascii (location_t
, tree
);
168 static tree
fold_builtin_toascii (location_t
, tree
);
169 static tree
fold_builtin_isdigit (location_t
, tree
);
170 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
171 static tree
fold_builtin_abs (location_t
, tree
, tree
);
172 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
174 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
175 static tree
fold_builtin_0 (location_t
, tree
, bool);
176 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
177 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
178 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
179 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
180 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
182 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
183 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
184 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
186 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
187 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
188 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
189 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
190 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
192 static rtx
expand_builtin_object_size (tree
);
193 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
194 enum built_in_function
);
195 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
196 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
197 static void maybe_emit_free_warning (tree
);
198 static tree
fold_builtin_object_size (tree
, tree
);
199 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
200 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
201 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
202 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
203 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
204 enum built_in_function
);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline
;
208 static unsigned HOST_WIDE_INT target_percent
;
209 static unsigned HOST_WIDE_INT target_c
;
210 static unsigned HOST_WIDE_INT target_s
;
211 static char target_percent_c
[3];
212 static char target_percent_s
[3];
213 static char target_percent_s_newline
[4];
214 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
215 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
216 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
218 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
219 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
220 static tree
do_mpfr_sincos (tree
, tree
, tree
);
221 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
222 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
223 const REAL_VALUE_TYPE
*, bool);
224 static tree
do_mpfr_remquo (tree
, tree
, tree
);
225 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
226 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  /* Builtins are invoked under one of three reserved prefixes; a prefix
     match is sufficient, no further validation of the suffix is done.  */
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
243 /* Return true if DECL is a function symbol representing a built-in. */
246 is_builtin_fn (tree decl
)
248 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
257 called_as_built_in (tree node
)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
262 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
263 return is_builtin_name (name
);
266 /* Compute values M and N such that M divides (address of EXP - N) and such
267 that N < M. If these numbers can be determined, store M in alignp and N in
268 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
269 *alignp and any bit-offset to *bitposp.
271 Note that the address (and thus the alignment) computed here is based
272 on the address to which a symbol resolves, whereas DECL_ALIGN is based
273 on the address at which an object is actually located. These two
274 addresses are not always the same. For example, on ARM targets,
275 the address &foo of a Thumb function foo() has the lowest bit set,
276 whereas foo() itself starts on an even address. */
279 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
280 unsigned HOST_WIDE_INT
*bitposp
)
282 HOST_WIDE_INT bitsize
, bitpos
;
284 enum machine_mode mode
;
285 int unsignedp
, volatilep
;
286 unsigned int inner
, align
= BITS_PER_UNIT
;
287 bool known_alignment
= false;
289 /* Get the innermost object and the constant (bitpos) and possibly
290 variable (offset) offset of the access. */
291 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
292 &mode
, &unsignedp
, &volatilep
, true);
294 /* Extract alignment information from the innermost object and
295 possibly adjust bitpos and offset. */
296 if (TREE_CODE (exp
) == CONST_DECL
)
297 exp
= DECL_INITIAL (exp
);
299 && TREE_CODE (exp
) != LABEL_DECL
)
301 if (TREE_CODE (exp
) == FUNCTION_DECL
)
303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
309 known_alignment
= true;
310 align
= 2 * BITS_PER_UNIT
;
315 known_alignment
= true;
316 align
= DECL_ALIGN (exp
);
319 else if (CONSTANT_CLASS_P (exp
))
321 known_alignment
= true;
322 align
= TYPE_ALIGN (TREE_TYPE (exp
));
323 #ifdef CONSTANT_ALIGNMENT
324 align
= (unsigned)CONSTANT_ALIGNMENT (exp
, align
);
327 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
329 known_alignment
= true;
330 align
= TYPE_ALIGN (TREE_TYPE (exp
));
332 else if (TREE_CODE (exp
) == INDIRECT_REF
)
334 known_alignment
= true;
335 align
= TYPE_ALIGN (TREE_TYPE (exp
));
337 else if (TREE_CODE (exp
) == MEM_REF
)
339 tree addr
= TREE_OPERAND (exp
, 0);
341 unsigned HOST_WIDE_INT ptr_bitpos
;
343 if (TREE_CODE (addr
) == BIT_AND_EXPR
344 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
346 known_alignment
= true;
347 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
348 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
349 align
*= BITS_PER_UNIT
;
350 addr
= TREE_OPERAND (addr
, 0);
353 if (get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
))
355 known_alignment
= true;
356 bitpos
+= ptr_bitpos
& ~(align
- 1);
357 align
= MAX (ptr_align
, align
);
360 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
362 else if (TREE_CODE (exp
) == TARGET_MEM_REF
)
365 unsigned HOST_WIDE_INT ptr_bitpos
;
366 tree addr
= TMR_BASE (exp
);
368 if (TREE_CODE (addr
) == BIT_AND_EXPR
369 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
371 known_alignment
= true;
372 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
373 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
374 align
*= BITS_PER_UNIT
;
375 addr
= TREE_OPERAND (addr
, 0);
378 if (get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
))
380 known_alignment
= true;
381 bitpos
+= ptr_bitpos
& ~(align
- 1);
382 align
= MAX (ptr_align
, align
);
385 if (TMR_OFFSET (exp
))
386 bitpos
+= TREE_INT_CST_LOW (TMR_OFFSET (exp
)) * BITS_PER_UNIT
;
387 if (TMR_INDEX (exp
) && TMR_STEP (exp
))
389 unsigned HOST_WIDE_INT step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
390 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
391 known_alignment
= true;
393 else if (TMR_INDEX (exp
))
394 known_alignment
= false;
396 if (TMR_INDEX2 (exp
))
397 known_alignment
= false;
400 /* If there is a non-constant offset part extract the maximum
401 alignment that can prevail. */
407 if (TREE_CODE (offset
) == PLUS_EXPR
)
409 next_offset
= TREE_OPERAND (offset
, 0);
410 offset
= TREE_OPERAND (offset
, 1);
414 if (host_integerp (offset
, 1))
416 /* Any overflow in calculating offset_bits won't change
419 = ((unsigned) tree_low_cst (offset
, 1) * BITS_PER_UNIT
);
422 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
424 else if (TREE_CODE (offset
) == MULT_EXPR
425 && host_integerp (TREE_OPERAND (offset
, 1), 1))
427 /* Any overflow in calculating offset_factor won't change
429 unsigned offset_factor
430 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
434 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
438 known_alignment
= false;
441 offset
= next_offset
;
446 /* Alignment is innermost object alignment adjusted by the constant
447 and non-constant offset parts. */
448 align
= MIN (align
, inner
);
449 bitpos
= bitpos
& (align
- 1);
454 bitpos
= bitpos
& (BITS_PER_UNIT
- 1);
455 *alignp
= BITS_PER_UNIT
;
458 return known_alignment
;
461 /* Return the alignment in bits of EXP, an object. */
464 get_object_alignment (tree exp
)
466 unsigned HOST_WIDE_INT bitpos
= 0;
469 get_object_alignment_1 (exp
, &align
, &bitpos
);
471 /* align and bitpos now specify known low bits of the pointer.
472 ptr & (align - 1) == bitpos. */
475 align
= (bitpos
& -bitpos
);
479 /* Return the alignment of object EXP, also considering its type when we do
480 not know of explicit misalignment. Only handle MEM_REF and TARGET_MEM_REF.
482 ??? Note that, in the general case, the type of an expression is not kept
483 consistent with misalignment information by the front-end, for example when
484 taking the address of a member of a packed structure. However, in most of
485 the cases, expressions have the alignment of their type so we optimistically
486 fall back to this alignment when we cannot compute a misalignment. */
489 get_object_or_type_alignment (tree exp
)
491 unsigned HOST_WIDE_INT misalign
;
493 bool known_alignment
;
495 gcc_assert (TREE_CODE (exp
) == MEM_REF
|| TREE_CODE (exp
) == TARGET_MEM_REF
);
496 known_alignment
= get_object_alignment_1 (exp
, &align
, &misalign
);
498 align
= (misalign
& -misalign
);
499 else if (!known_alignment
)
500 align
= TYPE_ALIGN (TREE_TYPE (exp
));
505 /* For a pointer valued expression EXP compute values M and N such that M
506 divides (EXP - N) and such that N < M. If these numbers can be determined,
507 store M in alignp and N in *BITPOSP and return true. Otherwise return false
508 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.
510 If EXP is not a pointer, false is returned too. */
513 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
514 unsigned HOST_WIDE_INT
*bitposp
)
518 if (TREE_CODE (exp
) == ADDR_EXPR
)
519 return get_object_alignment_1 (TREE_OPERAND (exp
, 0), alignp
, bitposp
);
520 else if (TREE_CODE (exp
) == SSA_NAME
521 && POINTER_TYPE_P (TREE_TYPE (exp
)))
523 unsigned int ptr_align
, ptr_misalign
;
524 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
526 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
528 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
529 *alignp
= ptr_align
* BITS_PER_UNIT
;
535 *alignp
= BITS_PER_UNIT
;
541 *alignp
= BITS_PER_UNIT
;
545 /* Return the alignment in bits of EXP, a pointer valued expression.
546 The alignment returned is, by default, the alignment of the thing that
547 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
549 Otherwise, look at the expression to see if we can do better, i.e., if the
550 expression is actually pointing at an object whose alignment is tighter. */
553 get_pointer_alignment (tree exp
)
555 unsigned HOST_WIDE_INT bitpos
= 0;
558 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
560 /* align and bitpos now specify known low bits of the pointer.
561 ptr & (align - 1) == bitpos. */
564 align
= (bitpos
& -bitpos
);
569 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
570 way, because it could contain a zero byte in the middle.
571 TREE_STRING_LENGTH is the size of the character array, not the string.
573 ONLY_VALUE should be nonzero if the result is not going to be emitted
574 into the instruction stream and zero if it is going to be expanded.
575 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
576 is returned, otherwise NULL, since
577 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
578 evaluate the side-effects.
580 The value returned is of type `ssizetype'.
582 Unfortunately, string_constant can't access the values of const char
583 arrays with initializers, so neither can we do so here. */
586 c_strlen (tree src
, int only_value
)
589 HOST_WIDE_INT offset
;
595 if (TREE_CODE (src
) == COND_EXPR
596 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
600 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
601 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
602 if (tree_int_cst_equal (len1
, len2
))
606 if (TREE_CODE (src
) == COMPOUND_EXPR
607 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
608 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
610 loc
= EXPR_LOC_OR_HERE (src
);
612 src
= string_constant (src
, &offset_node
);
616 max
= TREE_STRING_LENGTH (src
) - 1;
617 ptr
= TREE_STRING_POINTER (src
);
619 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
621 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
622 compute the offset to the following null if we don't know where to
623 start searching for it. */
626 for (i
= 0; i
< max
; i
++)
630 /* We don't know the starting offset, but we do know that the string
631 has no internal zero bytes. We can assume that the offset falls
632 within the bounds of the string; otherwise, the programmer deserves
633 what he gets. Subtract the offset from the length of the string,
634 and return that. This would perhaps not be valid if we were dealing
635 with named arrays in addition to literal string constants. */
637 return size_diffop_loc (loc
, size_int (max
), offset_node
);
640 /* We have a known offset into the string. Start searching there for
641 a null character if we can represent it as a single HOST_WIDE_INT. */
642 if (offset_node
== 0)
644 else if (! host_integerp (offset_node
, 0))
647 offset
= tree_low_cst (offset_node
, 0);
649 /* If the offset is known to be out of bounds, warn, and call strlen at
651 if (offset
< 0 || offset
> max
)
653 /* Suppress multiple warnings for propagated constant strings. */
654 if (! TREE_NO_WARNING (src
))
656 warning_at (loc
, 0, "offset outside bounds of constant string");
657 TREE_NO_WARNING (src
) = 1;
662 /* Use strlen to search for the first zero byte. Since any strings
663 constructed with build_string will have nulls appended, we win even
664 if we get handed something like (char[4])"abcd".
666 Since OFFSET is our starting index into the string, no further
667 calculation is needed. */
668 return ssize_int (strlen (ptr
+ offset
));
671 /* Return a char pointer for a C string if it is a string constant
672 or sum of string constant and integer constant. */
679 src
= string_constant (src
, &offset_node
);
683 if (offset_node
== 0)
684 return TREE_STRING_POINTER (src
);
685 else if (!host_integerp (offset_node
, 1)
686 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
689 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
692 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
693 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
696 c_readstr (const char *str
, enum machine_mode mode
)
702 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
707 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
710 if (WORDS_BIG_ENDIAN
)
711 j
= GET_MODE_SIZE (mode
) - i
- 1;
712 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
713 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
714 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
716 gcc_assert (j
< 2 * HOST_BITS_PER_WIDE_INT
);
719 ch
= (unsigned char) str
[i
];
720 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
722 return immed_double_const (c
[0], c
[1], mode
);
725 /* Cast a target constant CST to target CHAR and if that value fits into
726 host char type, return zero and put that value into variable pointed to by
730 target_char_cast (tree cst
, char *p
)
732 unsigned HOST_WIDE_INT val
, hostval
;
734 if (TREE_CODE (cst
) != INTEGER_CST
735 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
738 val
= TREE_INT_CST_LOW (cst
);
739 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
740 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
743 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
744 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
753 /* Similar to save_expr, but assumes that arbitrary code is not executed
754 in between the multiple evaluations. In particular, we assume that a
755 non-addressable local variable will not be modified. */
758 builtin_save_expr (tree exp
)
760 if (TREE_CODE (exp
) == SSA_NAME
761 || (TREE_ADDRESSABLE (exp
) == 0
762 && (TREE_CODE (exp
) == PARM_DECL
763 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
766 return save_expr (exp
);
769 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
770 times to get the address of either a higher stack frame, or a return
771 address located within it (depending on FNDECL_CODE). */
774 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
778 #ifdef INITIAL_FRAME_ADDRESS_RTX
779 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
783 /* For a zero count with __builtin_return_address, we don't care what
784 frame address we return, because target-specific definitions will
785 override us. Therefore frame pointer elimination is OK, and using
786 the soft frame pointer is OK.
788 For a nonzero count, or a zero count with __builtin_frame_address,
789 we require a stable offset from the current frame pointer to the
790 previous one, so we must use the hard frame pointer, and
791 we must disable frame pointer elimination. */
792 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
793 tem
= frame_pointer_rtx
;
796 tem
= hard_frame_pointer_rtx
;
798 /* Tell reload not to eliminate the frame pointer. */
799 crtl
->accesses_prior_frames
= 1;
803 /* Some machines need special handling before we can access
804 arbitrary frames. For example, on the SPARC, we must first flush
805 all register windows to the stack. */
806 #ifdef SETUP_FRAME_ADDRESSES
808 SETUP_FRAME_ADDRESSES ();
811 /* On the SPARC, the return address is not in the frame, it is in a
812 register. There is no way to access it off of the current frame
813 pointer, but it can be accessed off the previous frame pointer by
814 reading the value from the register window save area. */
815 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
816 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
820 /* Scan back COUNT frames to the specified frame. */
821 for (i
= 0; i
< count
; i
++)
823 /* Assume the dynamic chain pointer is in the word that the
824 frame address points to, unless otherwise specified. */
825 #ifdef DYNAMIC_CHAIN_ADDRESS
826 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
828 tem
= memory_address (Pmode
, tem
);
829 tem
= gen_frame_mem (Pmode
, tem
);
830 tem
= copy_to_reg (tem
);
833 /* For __builtin_frame_address, return what we've got. But, on
834 the SPARC for example, we may have to add a bias. */
835 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
836 #ifdef FRAME_ADDR_RTX
837 return FRAME_ADDR_RTX (tem
);
842 /* For __builtin_return_address, get the return address from that frame. */
843 #ifdef RETURN_ADDR_RTX
844 tem
= RETURN_ADDR_RTX (count
, tem
);
846 tem
= memory_address (Pmode
,
847 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
848 tem
= gen_frame_mem (Pmode
, tem
);
853 /* Alias set used for setjmp buffer. */
854 static alias_set_type setjmp_alias_set
= -1;
856 /* Construct the leading half of a __builtin_setjmp call. Control will
857 return to RECEIVER_LABEL. This is also called directly by the SJLJ
858 exception handling code. */
861 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
863 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
867 if (setjmp_alias_set
== -1)
868 setjmp_alias_set
= new_alias_set ();
870 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
872 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
874 /* We store the frame pointer and the address of receiver_label in
875 the buffer and use the rest of it for the stack save area, which
876 is machine-dependent. */
878 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
879 set_mem_alias_set (mem
, setjmp_alias_set
);
880 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
882 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
883 set_mem_alias_set (mem
, setjmp_alias_set
);
885 emit_move_insn (validize_mem (mem
),
886 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
888 stack_save
= gen_rtx_MEM (sa_mode
,
889 plus_constant (buf_addr
,
890 2 * GET_MODE_SIZE (Pmode
)));
891 set_mem_alias_set (stack_save
, setjmp_alias_set
);
892 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
894 /* If there is further processing to do, do it. */
895 #ifdef HAVE_builtin_setjmp_setup
896 if (HAVE_builtin_setjmp_setup
)
897 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
900 /* We have a nonlocal label. */
901 cfun
->has_nonlocal_label
= 1;
904 /* Construct the trailing part of a __builtin_setjmp call. This is
905 also called directly by the SJLJ exception handling code. */
908 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
912 /* Clobber the FP when we get here, so we have to make sure it's
913 marked as used by this function. */
914 emit_use (hard_frame_pointer_rtx
);
916 /* Mark the static chain as clobbered here so life information
917 doesn't get messed up for it. */
918 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
919 if (chain
&& REG_P (chain
))
920 emit_clobber (chain
);
922 /* Now put in the code to restore the frame pointer, and argument
923 pointer, if needed. */
924 #ifdef HAVE_nonlocal_goto
925 if (! HAVE_nonlocal_goto
)
928 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
929 /* This might change the hard frame pointer in ways that aren't
930 apparent to early optimization passes, so force a clobber. */
931 emit_clobber (hard_frame_pointer_rtx
);
934 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
935 if (fixed_regs
[ARG_POINTER_REGNUM
])
937 #ifdef ELIMINABLE_REGS
939 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
941 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
942 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
943 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
946 if (i
== ARRAY_SIZE (elim_regs
))
949 /* Now restore our arg pointer from the address at which it
950 was saved in our stack frame. */
951 emit_move_insn (crtl
->args
.internal_arg_pointer
,
952 copy_to_reg (get_arg_pointer_save_area ()));
957 #ifdef HAVE_builtin_setjmp_receiver
958 if (HAVE_builtin_setjmp_receiver
)
959 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
962 #ifdef HAVE_nonlocal_goto_receiver
963 if (HAVE_nonlocal_goto_receiver
)
964 emit_insn (gen_nonlocal_goto_receiver ());
969 /* We must not allow the code we just generated to be reordered by
970 scheduling. Specifically, the update of the frame pointer must
971 happen immediately, not later. */
972 emit_insn (gen_blockage ());
975 /* __builtin_longjmp is passed a pointer to an array of five words (not
976 all will be used on all machines). It operates similarly to the C
977 library function of the same name, but is more efficient. Much of
978 the code below is copied from the handling of non-local gotos. */
981 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
983 rtx fp
, lab
, stack
, insn
, last
;
984 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
986 /* DRAP is needed for stack realign if longjmp is expanded to current
988 if (SUPPORTS_STACK_ALIGNMENT
)
989 crtl
->need_drap
= true;
991 if (setjmp_alias_set
== -1)
992 setjmp_alias_set
= new_alias_set ();
994 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
996 buf_addr
= force_reg (Pmode
, buf_addr
);
998 /* We require that the user must pass a second argument of 1, because
999 that is what builtin_setjmp will return. */
1000 gcc_assert (value
== const1_rtx
);
1002 last
= get_last_insn ();
1003 #ifdef HAVE_builtin_longjmp
1004 if (HAVE_builtin_longjmp
)
1005 emit_insn (gen_builtin_longjmp (buf_addr
));
1009 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1010 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
1011 GET_MODE_SIZE (Pmode
)));
1013 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
1014 2 * GET_MODE_SIZE (Pmode
)));
1015 set_mem_alias_set (fp
, setjmp_alias_set
);
1016 set_mem_alias_set (lab
, setjmp_alias_set
);
1017 set_mem_alias_set (stack
, setjmp_alias_set
);
1019 /* Pick up FP, label, and SP from the block and jump. This code is
1020 from expand_goto in stmt.c; see there for detailed comments. */
1021 #ifdef HAVE_nonlocal_goto
1022 if (HAVE_nonlocal_goto
)
1023 /* We have to pass a value to the nonlocal_goto pattern that will
1024 get copied into the static_chain pointer, but it does not matter
1025 what that value is, because builtin_setjmp does not use it. */
1026 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
1030 lab
= copy_to_reg (lab
);
1032 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1033 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1035 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1036 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1038 emit_use (hard_frame_pointer_rtx
);
1039 emit_use (stack_pointer_rtx
);
1040 emit_indirect_jump (lab
);
1044 /* Search backwards and mark the jump insn as a non-local goto.
1045 Note that this precludes the use of __builtin_longjmp to a
1046 __builtin_setjmp target in the same function. However, we've
1047 already cautioned the user that these functions are for
1048 internal exception handling use only. */
1049 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1051 gcc_assert (insn
!= last
);
1055 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1058 else if (CALL_P (insn
))
1063 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1064 and the address of the save area. */
1067 expand_builtin_nonlocal_goto (tree exp
)
1069 tree t_label
, t_save_area
;
1070 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
1072 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1075 t_label
= CALL_EXPR_ARG (exp
, 0);
1076 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1078 r_label
= expand_normal (t_label
);
1079 r_label
= convert_memory_address (Pmode
, r_label
);
1080 r_save_area
= expand_normal (t_save_area
);
1081 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1082 /* Copy the address of the save location to a register just in case it was
1083 based on the frame pointer. */
1084 r_save_area
= copy_to_reg (r_save_area
);
1085 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1086 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1087 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
1089 crtl
->has_nonlocal_goto
= 1;
1091 #ifdef HAVE_nonlocal_goto
1092 /* ??? We no longer need to pass the static chain value, afaik. */
1093 if (HAVE_nonlocal_goto
)
1094 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1098 r_label
= copy_to_reg (r_label
);
1100 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1101 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1103 /* Restore frame pointer for containing function. */
1104 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1105 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1107 /* USE of hard_frame_pointer_rtx added for consistency;
1108 not clear if really needed. */
1109 emit_use (hard_frame_pointer_rtx
);
1110 emit_use (stack_pointer_rtx
);
1112 /* If the architecture is using a GP register, we must
1113 conservatively assume that the target function makes use of it.
1114 The prologue of functions with nonlocal gotos must therefore
1115 initialize the GP register to the appropriate value, and we
1116 must then make sure that this value is live at the point
1117 of the jump. (Note that this doesn't necessarily apply
1118 to targets with a nonlocal_goto pattern; they are free
1119 to implement it in their own way. Note also that this is
1120 a no-op if the GP register is a global invariant.) */
1121 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1122 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1123 emit_use (pic_offset_table_rtx
);
1125 emit_indirect_jump (r_label
);
1128 /* Search backwards to the jump insn and mark it as a
1130 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1134 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1137 else if (CALL_P (insn
))
1144 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1145 (not all will be used on all machines) that was passed to __builtin_setjmp.
1146 It updates the stack pointer in that block to correspond to the current
1150 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1152 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1154 = gen_rtx_MEM (sa_mode
,
1157 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
1159 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1162 /* Expand a call to __builtin_prefetch. For a target that does not support
1163 data prefetch, evaluate the memory address argument in case it has side
1167 expand_builtin_prefetch (tree exp
)
1169 tree arg0
, arg1
, arg2
;
1173 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1176 arg0
= CALL_EXPR_ARG (exp
, 0);
1178 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1179 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1181 nargs
= call_expr_nargs (exp
);
1183 arg1
= CALL_EXPR_ARG (exp
, 1);
1185 arg1
= integer_zero_node
;
1187 arg2
= CALL_EXPR_ARG (exp
, 2);
1189 arg2
= integer_three_node
;
1191 /* Argument 0 is an address. */
1192 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1194 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1195 if (TREE_CODE (arg1
) != INTEGER_CST
)
1197 error ("second argument to %<__builtin_prefetch%> must be a constant");
1198 arg1
= integer_zero_node
;
1200 op1
= expand_normal (arg1
);
1201 /* Argument 1 must be either zero or one. */
1202 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1204 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1209 /* Argument 2 (locality) must be a compile-time constant int. */
1210 if (TREE_CODE (arg2
) != INTEGER_CST
)
1212 error ("third argument to %<__builtin_prefetch%> must be a constant");
1213 arg2
= integer_zero_node
;
1215 op2
= expand_normal (arg2
);
1216 /* Argument 2 must be 0, 1, 2, or 3. */
1217 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1219 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1223 #ifdef HAVE_prefetch
1226 struct expand_operand ops
[3];
1228 create_address_operand (&ops
[0], op0
);
1229 create_integer_operand (&ops
[1], INTVAL (op1
));
1230 create_integer_operand (&ops
[2], INTVAL (op2
));
1231 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1236 /* Don't do anything with direct references to volatile memory, but
1237 generate code to handle other side effects. */
1238 if (!MEM_P (op0
) && side_effects_p (op0
))
1242 /* Get a MEM rtx for expression EXP which is the address of an operand
1243 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1244 the maximum length of the block of memory that might be accessed or
1248 get_memory_rtx (tree exp
, tree len
)
1250 tree orig_exp
= exp
;
1254 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1255 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1256 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1257 exp
= TREE_OPERAND (exp
, 0);
1259 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1260 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1262 /* Get an expression we can use to find the attributes to assign to MEM.
1263 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1264 we can. First remove any nops. */
1265 while (CONVERT_EXPR_P (exp
)
1266 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1267 exp
= TREE_OPERAND (exp
, 0);
1270 if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
1271 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1272 && host_integerp (TREE_OPERAND (exp
, 1), 0)
1273 && (off
= tree_low_cst (TREE_OPERAND (exp
, 1), 0)) > 0)
1274 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
1275 else if (TREE_CODE (exp
) == ADDR_EXPR
)
1276 exp
= TREE_OPERAND (exp
, 0);
1277 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1278 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1282 /* Honor attributes derived from exp, except for the alias set
1283 (as builtin stringops may alias with anything) and the size
1284 (as stringops may access multiple array elements). */
1287 set_mem_attributes (mem
, exp
, 0);
1290 mem
= adjust_automodify_address_nv (mem
, BLKmode
, NULL
, off
);
1292 /* Allow the string and memory builtins to overflow from one
1293 field into another, see http://gcc.gnu.org/PR23561.
1294 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1295 memory accessed by the string or memory builtin will fit
1296 within the field. */
1297 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1299 tree mem_expr
= MEM_EXPR (mem
);
1300 HOST_WIDE_INT offset
= -1, length
= -1;
1303 while (TREE_CODE (inner
) == ARRAY_REF
1304 || CONVERT_EXPR_P (inner
)
1305 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1306 || TREE_CODE (inner
) == SAVE_EXPR
)
1307 inner
= TREE_OPERAND (inner
, 0);
1309 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1311 if (MEM_OFFSET_KNOWN_P (mem
))
1312 offset
= MEM_OFFSET (mem
);
1314 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1315 length
= tree_low_cst (len
, 0);
1317 while (TREE_CODE (inner
) == COMPONENT_REF
)
1319 tree field
= TREE_OPERAND (inner
, 1);
1320 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1321 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1323 /* Bitfields are generally not byte-addressable. */
1324 gcc_assert (!DECL_BIT_FIELD (field
)
1325 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1326 % BITS_PER_UNIT
) == 0
1327 && host_integerp (DECL_SIZE (field
), 0)
1328 && (TREE_INT_CST_LOW (DECL_SIZE (field
))
1329 % BITS_PER_UNIT
) == 0));
1331 /* If we can prove that the memory starting at XEXP (mem, 0) and
1332 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1333 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1334 fields without DECL_SIZE_UNIT like flexible array members. */
1336 && DECL_SIZE_UNIT (field
)
1337 && host_integerp (DECL_SIZE_UNIT (field
), 0))
1340 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field
));
1343 && offset
+ length
<= size
)
1348 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1349 offset
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
))
1350 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1358 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1359 inner
= TREE_OPERAND (inner
, 0);
1362 if (mem_expr
== NULL
)
1364 if (mem_expr
!= MEM_EXPR (mem
))
1366 set_mem_expr (mem
, mem_expr
);
1368 set_mem_offset (mem
, offset
);
1370 clear_mem_offset (mem
);
1373 set_mem_alias_set (mem
, 0);
1374 clear_mem_size (mem
);
/* Built-in functions to perform an untyped call and return.  */

/* Per-target mode tables for __builtin_apply_args / __builtin_apply;
   indexed by hard register number, VOIDmode means "register unused".  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1387 /* Return the size required for the block returned by __builtin_apply_args,
1388 and initialize apply_args_mode. */
1391 apply_args_size (void)
1393 static int size
= -1;
1396 enum machine_mode mode
;
1398 /* The values computed by this function never change. */
1401 /* The first value is the incoming arg-pointer. */
1402 size
= GET_MODE_SIZE (Pmode
);
1404 /* The second value is the structure value address unless this is
1405 passed as an "invisible" first argument. */
1406 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1407 size
+= GET_MODE_SIZE (Pmode
);
1409 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1410 if (FUNCTION_ARG_REGNO_P (regno
))
1412 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1414 gcc_assert (mode
!= VOIDmode
);
1416 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1417 if (size
% align
!= 0)
1418 size
= CEIL (size
, align
) * align
;
1419 size
+= GET_MODE_SIZE (mode
);
1420 apply_args_mode
[regno
] = mode
;
1424 apply_args_mode
[regno
] = VOIDmode
;
1430 /* Return the size required for the block returned by __builtin_apply,
1431 and initialize apply_result_mode. */
1434 apply_result_size (void)
1436 static int size
= -1;
1438 enum machine_mode mode
;
1440 /* The values computed by this function never change. */
1445 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1446 if (targetm
.calls
.function_value_regno_p (regno
))
1448 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1450 gcc_assert (mode
!= VOIDmode
);
1452 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1453 if (size
% align
!= 0)
1454 size
= CEIL (size
, align
) * align
;
1455 size
+= GET_MODE_SIZE (mode
);
1456 apply_result_mode
[regno
] = mode
;
1459 apply_result_mode
[regno
] = VOIDmode
;
1461 /* Allow targets that use untyped_call and untyped_return to override
1462 the size so that machine-specific information can be stored here. */
1463 #ifdef APPLY_RESULT_SIZE
1464 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live result
   register (per apply_result_mode), laid out at the same aligned offsets
   used by apply_result_size.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1501 /* Save the state required to perform an untyped call with the same
1502 arguments as were passed to the current function. */
1505 expand_builtin_apply_args_1 (void)
1508 int size
, align
, regno
;
1509 enum machine_mode mode
;
1510 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1512 /* Create a block where the arg-pointer, structure value address,
1513 and argument registers can be saved. */
1514 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1516 /* Walk past the arg-pointer and structure value address. */
1517 size
= GET_MODE_SIZE (Pmode
);
1518 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1519 size
+= GET_MODE_SIZE (Pmode
);
1521 /* Save each register used in calling a function to the block. */
1522 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1523 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1525 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1526 if (size
% align
!= 0)
1527 size
= CEIL (size
, align
) * align
;
1529 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1531 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1532 size
+= GET_MODE_SIZE (mode
);
1535 /* Save the arg pointer to the block. */
1536 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1537 #ifdef STACK_GROWS_DOWNWARD
1538 /* We need the pointer as the caller actually passed them to us, not
1539 as we might have pretended they were passed. Make sure it's a valid
1540 operand, as emit_move_insn isn't expected to handle a PLUS. */
1542 = force_operand (plus_constant (tem
, crtl
->args
.pretend_args_size
),
1545 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1547 size
= GET_MODE_SIZE (Pmode
);
1549 /* Save the structure value address unless this is passed as an
1550 "invisible" first argument. */
1551 if (struct_incoming_value
)
1553 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1554 copy_to_reg (struct_incoming_value
));
1555 size
+= GET_MODE_SIZE (Pmode
);
1558 /* Return the address of the block. */
1559 return copy_addr_to_reg (XEXP (registers
, 0));
1562 /* __builtin_apply_args returns block of memory allocated on
1563 the stack into which is stored the arg pointer, structure
1564 value address, static chain, and all the registers that might
1565 possibly be used in performing a function call. The code is
1566 moved to the start of the function so the incoming values are
1570 expand_builtin_apply_args (void)
1572 /* Don't do __builtin_apply_args more than once in a function.
1573 Save the result of the first call and reuse it. */
1574 if (apply_args_value
!= 0)
1575 return apply_args_value
;
1577 /* When this function is called, it means that registers must be
1578 saved on entry to this function. So we migrate the
1579 call to the first insn of this function. */
1584 temp
= expand_builtin_apply_args_1 ();
1588 apply_args_value
= temp
;
1590 /* Put the insns after the NOTE that starts the function.
1591 If this is inside a start_sequence, make the outer-level insn
1592 chain current, so the code is placed at the start of the
1593 function. If internal_arg_pointer is a non-virtual pseudo,
1594 it needs to be placed after the function that initializes
1596 push_topmost_sequence ();
1597 if (REG_P (crtl
->args
.internal_arg_pointer
)
1598 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1599 emit_insn_before (seq
, parm_birth_insn
);
1601 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1602 pop_topmost_sequence ();
1607 /* Perform an untyped call and save the state required to perform an
1608 untyped return of whatever value was returned by the given function. */
1611 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1613 int size
, align
, regno
;
1614 enum machine_mode mode
;
1615 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1616 rtx old_stack_level
= 0;
1617 rtx call_fusage
= 0;
1618 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1620 arguments
= convert_memory_address (Pmode
, arguments
);
1622 /* Create a block where the return registers can be saved. */
1623 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1625 /* Fetch the arg pointer from the ARGUMENTS block. */
1626 incoming_args
= gen_reg_rtx (Pmode
);
1627 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1628 #ifndef STACK_GROWS_DOWNWARD
1629 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1630 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1633 /* Push a new argument block and copy the arguments. Do not allow
1634 the (potential) memcpy call below to interfere with our stack
1636 do_pending_stack_adjust ();
1639 /* Save the stack with nonlocal if available. */
1640 #ifdef HAVE_save_stack_nonlocal
1641 if (HAVE_save_stack_nonlocal
)
1642 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1645 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1647 /* Allocate a block of memory onto the stack and copy the memory
1648 arguments to the outgoing arguments address. We can pass TRUE
1649 as the 4th argument because we just saved the stack pointer
1650 and will restore it right after the call. */
1651 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1653 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1654 may have already set current_function_calls_alloca to true.
1655 current_function_calls_alloca won't be set if argsize is zero,
1656 so we have to guarantee need_drap is true here. */
1657 if (SUPPORTS_STACK_ALIGNMENT
)
1658 crtl
->need_drap
= true;
1660 dest
= virtual_outgoing_args_rtx
;
1661 #ifndef STACK_GROWS_DOWNWARD
1662 if (CONST_INT_P (argsize
))
1663 dest
= plus_constant (dest
, -INTVAL (argsize
));
1665 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1667 dest
= gen_rtx_MEM (BLKmode
, dest
);
1668 set_mem_align (dest
, PARM_BOUNDARY
);
1669 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1670 set_mem_align (src
, PARM_BOUNDARY
);
1671 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1673 /* Refer to the argument block. */
1675 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1676 set_mem_align (arguments
, PARM_BOUNDARY
);
1678 /* Walk past the arg-pointer and structure value address. */
1679 size
= GET_MODE_SIZE (Pmode
);
1681 size
+= GET_MODE_SIZE (Pmode
);
1683 /* Restore each of the registers previously saved. Make USE insns
1684 for each of these registers for use in making the call. */
1685 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1686 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1688 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1689 if (size
% align
!= 0)
1690 size
= CEIL (size
, align
) * align
;
1691 reg
= gen_rtx_REG (mode
, regno
);
1692 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1693 use_reg (&call_fusage
, reg
);
1694 size
+= GET_MODE_SIZE (mode
);
1697 /* Restore the structure value address unless this is passed as an
1698 "invisible" first argument. */
1699 size
= GET_MODE_SIZE (Pmode
);
1702 rtx value
= gen_reg_rtx (Pmode
);
1703 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1704 emit_move_insn (struct_value
, value
);
1705 if (REG_P (struct_value
))
1706 use_reg (&call_fusage
, struct_value
);
1707 size
+= GET_MODE_SIZE (Pmode
);
1710 /* All arguments and registers used for the call are set up by now! */
1711 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1713 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1714 and we don't want to load it into a register as an optimization,
1715 because prepare_call_address already did it if it should be done. */
1716 if (GET_CODE (function
) != SYMBOL_REF
)
1717 function
= memory_address (FUNCTION_MODE
, function
);
1719 /* Generate the actual call instruction and save the return value. */
1720 #ifdef HAVE_untyped_call
1721 if (HAVE_untyped_call
)
1722 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1723 result
, result_vector (1, result
)));
1726 #ifdef HAVE_call_value
1727 if (HAVE_call_value
)
1731 /* Locate the unique return register. It is not possible to
1732 express a call that sets more than one return register using
1733 call_value; use untyped_call for that. In fact, untyped_call
1734 only needs to save the return registers in the given block. */
1735 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1736 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1738 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1740 valreg
= gen_rtx_REG (mode
, regno
);
1743 emit_call_insn (GEN_CALL_VALUE (valreg
,
1744 gen_rtx_MEM (FUNCTION_MODE
, function
),
1745 const0_rtx
, NULL_RTX
, const0_rtx
));
1747 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1753 /* Find the CALL insn we just emitted, and attach the register usage
1755 call_insn
= last_call_insn ();
1756 add_function_usage_to (call_insn
, call_fusage
);
1758 /* Restore the stack. */
1759 #ifdef HAVE_save_stack_nonlocal
1760 if (HAVE_save_stack_nonlocal
)
1761 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1764 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1765 fixup_args_size_notes (call_insn
, get_last_insn(), 0);
1769 /* Return the address of the result block. */
1770 result
= copy_addr_to_reg (XEXP (result
, 0));
1771 return convert_memory_address (ptr_mode
, result
);
1774 /* Perform an untyped return. */
1777 expand_builtin_return (rtx result
)
1779 int size
, align
, regno
;
1780 enum machine_mode mode
;
1782 rtx call_fusage
= 0;
1784 result
= convert_memory_address (Pmode
, result
);
1786 apply_result_size ();
1787 result
= gen_rtx_MEM (BLKmode
, result
);
1789 #ifdef HAVE_untyped_return
1790 if (HAVE_untyped_return
)
1792 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1798 /* Restore the return value and note that each value is used. */
1800 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1801 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1803 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1804 if (size
% align
!= 0)
1805 size
= CEIL (size
, align
) * align
;
1806 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1807 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1809 push_to_sequence (call_fusage
);
1811 call_fusage
= get_insns ();
1813 size
+= GET_MODE_SIZE (mode
);
1816 /* Put the USE insns before the return. */
1817 emit_insn (call_fusage
);
1819 /* Return whatever values was restored by jumping directly to the end
1821 expand_naked_return ();
1824 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1826 static enum type_class
1827 type_to_class (tree type
)
1829 switch (TREE_CODE (type
))
1831 case VOID_TYPE
: return void_type_class
;
1832 case INTEGER_TYPE
: return integer_type_class
;
1833 case ENUMERAL_TYPE
: return enumeral_type_class
;
1834 case BOOLEAN_TYPE
: return boolean_type_class
;
1835 case POINTER_TYPE
: return pointer_type_class
;
1836 case REFERENCE_TYPE
: return reference_type_class
;
1837 case OFFSET_TYPE
: return offset_type_class
;
1838 case REAL_TYPE
: return real_type_class
;
1839 case COMPLEX_TYPE
: return complex_type_class
;
1840 case FUNCTION_TYPE
: return function_type_class
;
1841 case METHOD_TYPE
: return method_type_class
;
1842 case RECORD_TYPE
: return record_type_class
;
1844 case QUAL_UNION_TYPE
: return union_type_class
;
1845 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1846 ? string_type_class
: array_type_class
);
1847 case LANG_TYPE
: return lang_type_class
;
1848 default: return no_type_class
;
1852 /* Expand a call EXP to __builtin_classify_type. */
1855 expand_builtin_classify_type (tree exp
)
1857 if (call_expr_nargs (exp
))
1858 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1859 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1876 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1877 if available. If IMPLICIT is true use the implicit builtin declaration,
1878 otherwise use the explicit declaration. If we can't do the conversion,
1882 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1884 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1888 CASE_MATHFN (BUILT_IN_ACOS
)
1889 CASE_MATHFN (BUILT_IN_ACOSH
)
1890 CASE_MATHFN (BUILT_IN_ASIN
)
1891 CASE_MATHFN (BUILT_IN_ASINH
)
1892 CASE_MATHFN (BUILT_IN_ATAN
)
1893 CASE_MATHFN (BUILT_IN_ATAN2
)
1894 CASE_MATHFN (BUILT_IN_ATANH
)
1895 CASE_MATHFN (BUILT_IN_CBRT
)
1896 CASE_MATHFN (BUILT_IN_CEIL
)
1897 CASE_MATHFN (BUILT_IN_CEXPI
)
1898 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1899 CASE_MATHFN (BUILT_IN_COS
)
1900 CASE_MATHFN (BUILT_IN_COSH
)
1901 CASE_MATHFN (BUILT_IN_DREM
)
1902 CASE_MATHFN (BUILT_IN_ERF
)
1903 CASE_MATHFN (BUILT_IN_ERFC
)
1904 CASE_MATHFN (BUILT_IN_EXP
)
1905 CASE_MATHFN (BUILT_IN_EXP10
)
1906 CASE_MATHFN (BUILT_IN_EXP2
)
1907 CASE_MATHFN (BUILT_IN_EXPM1
)
1908 CASE_MATHFN (BUILT_IN_FABS
)
1909 CASE_MATHFN (BUILT_IN_FDIM
)
1910 CASE_MATHFN (BUILT_IN_FLOOR
)
1911 CASE_MATHFN (BUILT_IN_FMA
)
1912 CASE_MATHFN (BUILT_IN_FMAX
)
1913 CASE_MATHFN (BUILT_IN_FMIN
)
1914 CASE_MATHFN (BUILT_IN_FMOD
)
1915 CASE_MATHFN (BUILT_IN_FREXP
)
1916 CASE_MATHFN (BUILT_IN_GAMMA
)
1917 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1918 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1919 CASE_MATHFN (BUILT_IN_HYPOT
)
1920 CASE_MATHFN (BUILT_IN_ILOGB
)
1921 CASE_MATHFN (BUILT_IN_ICEIL
)
1922 CASE_MATHFN (BUILT_IN_IFLOOR
)
1923 CASE_MATHFN (BUILT_IN_INF
)
1924 CASE_MATHFN (BUILT_IN_IRINT
)
1925 CASE_MATHFN (BUILT_IN_IROUND
)
1926 CASE_MATHFN (BUILT_IN_ISINF
)
1927 CASE_MATHFN (BUILT_IN_J0
)
1928 CASE_MATHFN (BUILT_IN_J1
)
1929 CASE_MATHFN (BUILT_IN_JN
)
1930 CASE_MATHFN (BUILT_IN_LCEIL
)
1931 CASE_MATHFN (BUILT_IN_LDEXP
)
1932 CASE_MATHFN (BUILT_IN_LFLOOR
)
1933 CASE_MATHFN (BUILT_IN_LGAMMA
)
1934 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1935 CASE_MATHFN (BUILT_IN_LLCEIL
)
1936 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1937 CASE_MATHFN (BUILT_IN_LLRINT
)
1938 CASE_MATHFN (BUILT_IN_LLROUND
)
1939 CASE_MATHFN (BUILT_IN_LOG
)
1940 CASE_MATHFN (BUILT_IN_LOG10
)
1941 CASE_MATHFN (BUILT_IN_LOG1P
)
1942 CASE_MATHFN (BUILT_IN_LOG2
)
1943 CASE_MATHFN (BUILT_IN_LOGB
)
1944 CASE_MATHFN (BUILT_IN_LRINT
)
1945 CASE_MATHFN (BUILT_IN_LROUND
)
1946 CASE_MATHFN (BUILT_IN_MODF
)
1947 CASE_MATHFN (BUILT_IN_NAN
)
1948 CASE_MATHFN (BUILT_IN_NANS
)
1949 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1950 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1951 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1952 CASE_MATHFN (BUILT_IN_POW
)
1953 CASE_MATHFN (BUILT_IN_POWI
)
1954 CASE_MATHFN (BUILT_IN_POW10
)
1955 CASE_MATHFN (BUILT_IN_REMAINDER
)
1956 CASE_MATHFN (BUILT_IN_REMQUO
)
1957 CASE_MATHFN (BUILT_IN_RINT
)
1958 CASE_MATHFN (BUILT_IN_ROUND
)
1959 CASE_MATHFN (BUILT_IN_SCALB
)
1960 CASE_MATHFN (BUILT_IN_SCALBLN
)
1961 CASE_MATHFN (BUILT_IN_SCALBN
)
1962 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1963 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1964 CASE_MATHFN (BUILT_IN_SIN
)
1965 CASE_MATHFN (BUILT_IN_SINCOS
)
1966 CASE_MATHFN (BUILT_IN_SINH
)
1967 CASE_MATHFN (BUILT_IN_SQRT
)
1968 CASE_MATHFN (BUILT_IN_TAN
)
1969 CASE_MATHFN (BUILT_IN_TANH
)
1970 CASE_MATHFN (BUILT_IN_TGAMMA
)
1971 CASE_MATHFN (BUILT_IN_TRUNC
)
1972 CASE_MATHFN (BUILT_IN_Y0
)
1973 CASE_MATHFN (BUILT_IN_Y1
)
1974 CASE_MATHFN (BUILT_IN_YN
)
1980 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1982 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1984 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1989 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1992 return builtin_decl_explicit (fcode2
);
1995 /* Like mathfn_built_in_1(), but always use the implicit array. */
1998 mathfn_built_in (tree type
, enum built_in_function fn
)
2000 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
2003 /* If errno must be maintained, expand the RTL to check if the result,
2004 TARGET, of a built-in function call, EXP, is NaN, and if so set
2008 expand_errno_check (tree exp
, rtx target
)
2010 rtx lab
= gen_label_rtx ();
2012 /* Test the result; if it is NaN, set errno=EDOM because
2013 the argument was not in the domain. */
2014 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
2015 NULL_RTX
, NULL_RTX
, lab
,
2016 /* The jump is very likely. */
2017 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
2020 /* If this built-in doesn't throw an exception, set errno directly. */
2021 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
2023 #ifdef GEN_ERRNO_RTX
2024 rtx errno_rtx
= GEN_ERRNO_RTX
;
2027 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
2029 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
2035 /* Make sure the library call isn't expanded as a tail call. */
2036 CALL_EXPR_TAILCALL (exp
) = 0;
2038 /* We can't set errno=EDOM directly; let the library call do it.
2039 Pop the arguments right away in case the call gets deleted. */
2041 expand_call (exp
, target
, 0);
2046 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2047 Return NULL_RTX if a normal call should be emitted rather than expanding
2048 the function in-line. EXP is the expression that is a call to the builtin
2049 function; if convenient, the result should be placed in TARGET.
2050 SUBTARGET may be used as the target for computing one of EXP's operands. */
2053 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
2055 optab builtin_optab
;
2057 tree fndecl
= get_callee_fndecl (exp
);
2058 enum machine_mode mode
;
2059 bool errno_set
= false;
2062 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2065 arg
= CALL_EXPR_ARG (exp
, 0);
2067 switch (DECL_FUNCTION_CODE (fndecl
))
2069 CASE_FLT_FN (BUILT_IN_SQRT
):
2070 errno_set
= ! tree_expr_nonnegative_p (arg
);
2071 builtin_optab
= sqrt_optab
;
2073 CASE_FLT_FN (BUILT_IN_EXP
):
2074 errno_set
= true; builtin_optab
= exp_optab
; break;
2075 CASE_FLT_FN (BUILT_IN_EXP10
):
2076 CASE_FLT_FN (BUILT_IN_POW10
):
2077 errno_set
= true; builtin_optab
= exp10_optab
; break;
2078 CASE_FLT_FN (BUILT_IN_EXP2
):
2079 errno_set
= true; builtin_optab
= exp2_optab
; break;
2080 CASE_FLT_FN (BUILT_IN_EXPM1
):
2081 errno_set
= true; builtin_optab
= expm1_optab
; break;
2082 CASE_FLT_FN (BUILT_IN_LOGB
):
2083 errno_set
= true; builtin_optab
= logb_optab
; break;
2084 CASE_FLT_FN (BUILT_IN_LOG
):
2085 errno_set
= true; builtin_optab
= log_optab
; break;
2086 CASE_FLT_FN (BUILT_IN_LOG10
):
2087 errno_set
= true; builtin_optab
= log10_optab
; break;
2088 CASE_FLT_FN (BUILT_IN_LOG2
):
2089 errno_set
= true; builtin_optab
= log2_optab
; break;
2090 CASE_FLT_FN (BUILT_IN_LOG1P
):
2091 errno_set
= true; builtin_optab
= log1p_optab
; break;
2092 CASE_FLT_FN (BUILT_IN_ASIN
):
2093 builtin_optab
= asin_optab
; break;
2094 CASE_FLT_FN (BUILT_IN_ACOS
):
2095 builtin_optab
= acos_optab
; break;
2096 CASE_FLT_FN (BUILT_IN_TAN
):
2097 builtin_optab
= tan_optab
; break;
2098 CASE_FLT_FN (BUILT_IN_ATAN
):
2099 builtin_optab
= atan_optab
; break;
2100 CASE_FLT_FN (BUILT_IN_FLOOR
):
2101 builtin_optab
= floor_optab
; break;
2102 CASE_FLT_FN (BUILT_IN_CEIL
):
2103 builtin_optab
= ceil_optab
; break;
2104 CASE_FLT_FN (BUILT_IN_TRUNC
):
2105 builtin_optab
= btrunc_optab
; break;
2106 CASE_FLT_FN (BUILT_IN_ROUND
):
2107 builtin_optab
= round_optab
; break;
2108 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2109 builtin_optab
= nearbyint_optab
;
2110 if (flag_trapping_math
)
2112 /* Else fallthrough and expand as rint. */
2113 CASE_FLT_FN (BUILT_IN_RINT
):
2114 builtin_optab
= rint_optab
; break;
2115 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2116 builtin_optab
= significand_optab
; break;
2121 /* Make a suitable register to place result in. */
2122 mode
= TYPE_MODE (TREE_TYPE (exp
));
2124 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2127 /* Before working hard, check whether the instruction is available. */
2128 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2129 && (!errno_set
|| !optimize_insn_for_size_p ()))
2131 target
= gen_reg_rtx (mode
);
2133 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2134 need to expand the argument again. This way, we will not perform
2135 side-effects more the once. */
2136 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2138 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2149 expand_errno_check (exp
, target
);
2151 /* Output the entire sequence. */
2152 insns
= get_insns ();
2158 /* If we were unable to expand via the builtin, stop the sequence
2159 (without outputting the insns) and call to the library function
2160 with the stabilized argument list. */
2164 return expand_call (exp
, target
, target
== const0_rtx
);
2167 /* Expand a call to the builtin binary math functions (pow and atan2).
2168 Return NULL_RTX if a normal call should be emitted rather than expanding the
2169 function in-line. EXP is the expression that is a call to the builtin
2170 function; if convenient, the result should be placed in TARGET.
2171 SUBTARGET may be used as the target for computing one of EXP's
2175 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2177 optab builtin_optab
;
2178 rtx op0
, op1
, insns
;
2179 int op1_type
= REAL_TYPE
;
2180 tree fndecl
= get_callee_fndecl (exp
);
2182 enum machine_mode mode
;
2183 bool errno_set
= true;
2185 switch (DECL_FUNCTION_CODE (fndecl
))
2187 CASE_FLT_FN (BUILT_IN_SCALBN
):
2188 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2189 CASE_FLT_FN (BUILT_IN_LDEXP
):
2190 op1_type
= INTEGER_TYPE
;
2195 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2198 arg0
= CALL_EXPR_ARG (exp
, 0);
2199 arg1
= CALL_EXPR_ARG (exp
, 1);
2201 switch (DECL_FUNCTION_CODE (fndecl
))
2203 CASE_FLT_FN (BUILT_IN_POW
):
2204 builtin_optab
= pow_optab
; break;
2205 CASE_FLT_FN (BUILT_IN_ATAN2
):
2206 builtin_optab
= atan2_optab
; break;
2207 CASE_FLT_FN (BUILT_IN_SCALB
):
2208 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2210 builtin_optab
= scalb_optab
; break;
2211 CASE_FLT_FN (BUILT_IN_SCALBN
):
2212 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2213 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2215 /* Fall through... */
2216 CASE_FLT_FN (BUILT_IN_LDEXP
):
2217 builtin_optab
= ldexp_optab
; break;
2218 CASE_FLT_FN (BUILT_IN_FMOD
):
2219 builtin_optab
= fmod_optab
; break;
2220 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2221 CASE_FLT_FN (BUILT_IN_DREM
):
2222 builtin_optab
= remainder_optab
; break;
2227 /* Make a suitable register to place result in. */
2228 mode
= TYPE_MODE (TREE_TYPE (exp
));
2230 /* Before working hard, check whether the instruction is available. */
2231 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2234 target
= gen_reg_rtx (mode
);
2236 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2239 if (errno_set
&& optimize_insn_for_size_p ())
2242 /* Always stabilize the argument list. */
2243 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2244 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2246 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2247 op1
= expand_normal (arg1
);
2251 /* Compute into TARGET.
2252 Set TARGET to wherever the result comes back. */
2253 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2254 target
, 0, OPTAB_DIRECT
);
2256 /* If we were unable to expand via the builtin, stop the sequence
2257 (without outputting the insns) and call to the library function
2258 with the stabilized argument list. */
2262 return expand_call (exp
, target
, target
== const0_rtx
);
2266 expand_errno_check (exp
, target
);
2268 /* Output the entire sequence. */
2269 insns
= get_insns ();
2276 /* Expand a call to the builtin trinary math functions (fma).
2277 Return NULL_RTX if a normal call should be emitted rather than expanding the
2278 function in-line. EXP is the expression that is a call to the builtin
2279 function; if convenient, the result should be placed in TARGET.
2280 SUBTARGET may be used as the target for computing one of EXP's
2284 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2286 optab builtin_optab
;
2287 rtx op0
, op1
, op2
, insns
;
2288 tree fndecl
= get_callee_fndecl (exp
);
2289 tree arg0
, arg1
, arg2
;
2290 enum machine_mode mode
;
2292 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2295 arg0
= CALL_EXPR_ARG (exp
, 0);
2296 arg1
= CALL_EXPR_ARG (exp
, 1);
2297 arg2
= CALL_EXPR_ARG (exp
, 2);
2299 switch (DECL_FUNCTION_CODE (fndecl
))
2301 CASE_FLT_FN (BUILT_IN_FMA
):
2302 builtin_optab
= fma_optab
; break;
2307 /* Make a suitable register to place result in. */
2308 mode
= TYPE_MODE (TREE_TYPE (exp
));
2310 /* Before working hard, check whether the instruction is available. */
2311 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2314 target
= gen_reg_rtx (mode
);
2316 /* Always stabilize the argument list. */
2317 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2318 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2319 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2321 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2322 op1
= expand_normal (arg1
);
2323 op2
= expand_normal (arg2
);
2327 /* Compute into TARGET.
2328 Set TARGET to wherever the result comes back. */
2329 target
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2332 /* If we were unable to expand via the builtin, stop the sequence
2333 (without outputting the insns) and call to the library function
2334 with the stabilized argument list. */
2338 return expand_call (exp
, target
, target
== const0_rtx
);
2341 /* Output the entire sequence. */
2342 insns
= get_insns ();
2349 /* Expand a call to the builtin sin and cos math functions.
2350 Return NULL_RTX if a normal call should be emitted rather than expanding the
2351 function in-line. EXP is the expression that is a call to the builtin
2352 function; if convenient, the result should be placed in TARGET.
2353 SUBTARGET may be used as the target for computing one of EXP's
2357 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2359 optab builtin_optab
;
2361 tree fndecl
= get_callee_fndecl (exp
);
2362 enum machine_mode mode
;
2365 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2368 arg
= CALL_EXPR_ARG (exp
, 0);
2370 switch (DECL_FUNCTION_CODE (fndecl
))
2372 CASE_FLT_FN (BUILT_IN_SIN
):
2373 CASE_FLT_FN (BUILT_IN_COS
):
2374 builtin_optab
= sincos_optab
; break;
2379 /* Make a suitable register to place result in. */
2380 mode
= TYPE_MODE (TREE_TYPE (exp
));
2382 /* Check if sincos insn is available, otherwise fallback
2383 to sin or cos insn. */
2384 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2385 switch (DECL_FUNCTION_CODE (fndecl
))
2387 CASE_FLT_FN (BUILT_IN_SIN
):
2388 builtin_optab
= sin_optab
; break;
2389 CASE_FLT_FN (BUILT_IN_COS
):
2390 builtin_optab
= cos_optab
; break;
2395 /* Before working hard, check whether the instruction is available. */
2396 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2398 target
= gen_reg_rtx (mode
);
2400 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2401 need to expand the argument again. This way, we will not perform
2402 side-effects more the once. */
2403 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2405 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2409 /* Compute into TARGET.
2410 Set TARGET to wherever the result comes back. */
2411 if (builtin_optab
== sincos_optab
)
2415 switch (DECL_FUNCTION_CODE (fndecl
))
2417 CASE_FLT_FN (BUILT_IN_SIN
):
2418 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2420 CASE_FLT_FN (BUILT_IN_COS
):
2421 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2426 gcc_assert (result
);
2430 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2435 /* Output the entire sequence. */
2436 insns
= get_insns ();
2442 /* If we were unable to expand via the builtin, stop the sequence
2443 (without outputting the insns) and call to the library function
2444 with the stabilized argument list. */
2448 target
= expand_call (exp
, target
, target
== const0_rtx
);
2453 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2454 return an RTL instruction code that implements the functionality.
2455 If that isn't possible or available return CODE_FOR_nothing. */
2457 static enum insn_code
2458 interclass_mathfn_icode (tree arg
, tree fndecl
)
2460 bool errno_set
= false;
2461 optab builtin_optab
= 0;
2462 enum machine_mode mode
;
2464 switch (DECL_FUNCTION_CODE (fndecl
))
2466 CASE_FLT_FN (BUILT_IN_ILOGB
):
2467 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2468 CASE_FLT_FN (BUILT_IN_ISINF
):
2469 builtin_optab
= isinf_optab
; break;
2470 case BUILT_IN_ISNORMAL
:
2471 case BUILT_IN_ISFINITE
:
2472 CASE_FLT_FN (BUILT_IN_FINITE
):
2473 case BUILT_IN_FINITED32
:
2474 case BUILT_IN_FINITED64
:
2475 case BUILT_IN_FINITED128
:
2476 case BUILT_IN_ISINFD32
:
2477 case BUILT_IN_ISINFD64
:
2478 case BUILT_IN_ISINFD128
:
2479 /* These builtins have no optabs (yet). */
2485 /* There's no easy way to detect the case we need to set EDOM. */
2486 if (flag_errno_math
&& errno_set
)
2487 return CODE_FOR_nothing
;
2489 /* Optab mode depends on the mode of the input argument. */
2490 mode
= TYPE_MODE (TREE_TYPE (arg
));
2493 return optab_handler (builtin_optab
, mode
);
2494 return CODE_FOR_nothing
;
2497 /* Expand a call to one of the builtin math functions that operate on
2498 floating point argument and output an integer result (ilogb, isinf,
2500 Return 0 if a normal call should be emitted rather than expanding the
2501 function in-line. EXP is the expression that is a call to the builtin
2502 function; if convenient, the result should be placed in TARGET. */
2505 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2507 enum insn_code icode
= CODE_FOR_nothing
;
2509 tree fndecl
= get_callee_fndecl (exp
);
2510 enum machine_mode mode
;
2513 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2516 arg
= CALL_EXPR_ARG (exp
, 0);
2517 icode
= interclass_mathfn_icode (arg
, fndecl
);
2518 mode
= TYPE_MODE (TREE_TYPE (arg
));
2520 if (icode
!= CODE_FOR_nothing
)
2522 struct expand_operand ops
[1];
2523 rtx last
= get_last_insn ();
2524 tree orig_arg
= arg
;
2526 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2527 need to expand the argument again. This way, we will not perform
2528 side-effects more the once. */
2529 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2531 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2533 if (mode
!= GET_MODE (op0
))
2534 op0
= convert_to_mode (mode
, op0
, 0);
2536 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2537 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2538 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2539 return ops
[0].value
;
2541 delete_insns_since (last
);
2542 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2548 /* Expand a call to the builtin sincos math function.
2549 Return NULL_RTX if a normal call should be emitted rather than expanding the
2550 function in-line. EXP is the expression that is a call to the builtin
2554 expand_builtin_sincos (tree exp
)
2556 rtx op0
, op1
, op2
, target1
, target2
;
2557 enum machine_mode mode
;
2558 tree arg
, sinp
, cosp
;
2560 location_t loc
= EXPR_LOCATION (exp
);
2561 tree alias_type
, alias_off
;
2563 if (!validate_arglist (exp
, REAL_TYPE
,
2564 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2567 arg
= CALL_EXPR_ARG (exp
, 0);
2568 sinp
= CALL_EXPR_ARG (exp
, 1);
2569 cosp
= CALL_EXPR_ARG (exp
, 2);
2571 /* Make a suitable register to place result in. */
2572 mode
= TYPE_MODE (TREE_TYPE (arg
));
2574 /* Check if sincos insn is available, otherwise emit the call. */
2575 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2578 target1
= gen_reg_rtx (mode
);
2579 target2
= gen_reg_rtx (mode
);
2581 op0
= expand_normal (arg
);
2582 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2583 alias_off
= build_int_cst (alias_type
, 0);
2584 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2586 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2589 /* Compute into target1 and target2.
2590 Set TARGET to wherever the result comes back. */
2591 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2592 gcc_assert (result
);
2594 /* Move target1 and target2 to the memory locations indicated
2596 emit_move_insn (op1
, target1
);
2597 emit_move_insn (op2
, target2
);
2602 /* Expand a call to the internal cexpi builtin to the sincos math function.
2603 EXP is the expression that is a call to the builtin function; if convenient,
2604 the result should be placed in TARGET. */
2607 expand_builtin_cexpi (tree exp
, rtx target
)
2609 tree fndecl
= get_callee_fndecl (exp
);
2611 enum machine_mode mode
;
2613 location_t loc
= EXPR_LOCATION (exp
);
2615 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2618 arg
= CALL_EXPR_ARG (exp
, 0);
2619 type
= TREE_TYPE (arg
);
2620 mode
= TYPE_MODE (TREE_TYPE (arg
));
2622 /* Try expanding via a sincos optab, fall back to emitting a libcall
2623 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2624 is only generated from sincos, cexp or if we have either of them. */
2625 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2627 op1
= gen_reg_rtx (mode
);
2628 op2
= gen_reg_rtx (mode
);
2630 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2632 /* Compute into op1 and op2. */
2633 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2635 else if (TARGET_HAS_SINCOS
)
2637 tree call
, fn
= NULL_TREE
;
2641 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2642 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2643 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2644 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2645 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2646 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2650 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2651 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2652 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2653 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2654 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2655 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2657 /* Make sure not to fold the sincos call again. */
2658 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2659 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2660 call
, 3, arg
, top1
, top2
));
2664 tree call
, fn
= NULL_TREE
, narg
;
2665 tree ctype
= build_complex_type (type
);
2667 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2668 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2669 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2670 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2671 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2672 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2676 /* If we don't have a decl for cexp create one. This is the
2677 friendliest fallback if the user calls __builtin_cexpi
2678 without full target C99 function support. */
2679 if (fn
== NULL_TREE
)
2682 const char *name
= NULL
;
2684 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2686 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2688 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2691 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2692 fn
= build_fn_decl (name
, fntype
);
2695 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2696 build_real (type
, dconst0
), arg
);
2698 /* Make sure not to fold the cexp call again. */
2699 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2700 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2701 target
, VOIDmode
, EXPAND_NORMAL
);
2704 /* Now build the proper return type. */
2705 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2706 make_tree (TREE_TYPE (arg
), op2
),
2707 make_tree (TREE_TYPE (arg
), op1
)),
2708 target
, VOIDmode
, EXPAND_NORMAL
);
2711 /* Conveniently construct a function call expression. FNDECL names the
2712 function to be called, N is the number of arguments, and the "..."
2713 parameters are the argument expressions. Unlike build_call_exr
2714 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2717 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2720 tree fntype
= TREE_TYPE (fndecl
);
2721 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2724 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2726 SET_EXPR_LOCATION (fn
, loc
);
2730 /* Expand a call to one of the builtin rounding functions gcc defines
2731 as an extension (lfloor and lceil). As these are gcc extensions we
2732 do not need to worry about setting errno to EDOM.
2733 If expanding via optab fails, lower expression to (int)(floor(x)).
2734 EXP is the expression that is a call to the builtin function;
2735 if convenient, the result should be placed in TARGET. */
2738 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2740 convert_optab builtin_optab
;
2741 rtx op0
, insns
, tmp
;
2742 tree fndecl
= get_callee_fndecl (exp
);
2743 enum built_in_function fallback_fn
;
2744 tree fallback_fndecl
;
2745 enum machine_mode mode
;
2748 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2751 arg
= CALL_EXPR_ARG (exp
, 0);
2753 switch (DECL_FUNCTION_CODE (fndecl
))
2755 CASE_FLT_FN (BUILT_IN_ICEIL
):
2756 CASE_FLT_FN (BUILT_IN_LCEIL
):
2757 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2758 builtin_optab
= lceil_optab
;
2759 fallback_fn
= BUILT_IN_CEIL
;
2762 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2763 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2764 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2765 builtin_optab
= lfloor_optab
;
2766 fallback_fn
= BUILT_IN_FLOOR
;
2773 /* Make a suitable register to place result in. */
2774 mode
= TYPE_MODE (TREE_TYPE (exp
));
2776 target
= gen_reg_rtx (mode
);
2778 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2779 need to expand the argument again. This way, we will not perform
2780 side-effects more the once. */
2781 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2783 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2787 /* Compute into TARGET. */
2788 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2790 /* Output the entire sequence. */
2791 insns
= get_insns ();
2797 /* If we were unable to expand via the builtin, stop the sequence
2798 (without outputting the insns). */
2801 /* Fall back to floating point rounding optab. */
2802 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2804 /* For non-C99 targets we may end up without a fallback fndecl here
2805 if the user called __builtin_lfloor directly. In this case emit
2806 a call to the floor/ceil variants nevertheless. This should result
2807 in the best user experience for not full C99 targets. */
2808 if (fallback_fndecl
== NULL_TREE
)
2811 const char *name
= NULL
;
2813 switch (DECL_FUNCTION_CODE (fndecl
))
2815 case BUILT_IN_ICEIL
:
2816 case BUILT_IN_LCEIL
:
2817 case BUILT_IN_LLCEIL
:
2820 case BUILT_IN_ICEILF
:
2821 case BUILT_IN_LCEILF
:
2822 case BUILT_IN_LLCEILF
:
2825 case BUILT_IN_ICEILL
:
2826 case BUILT_IN_LCEILL
:
2827 case BUILT_IN_LLCEILL
:
2830 case BUILT_IN_IFLOOR
:
2831 case BUILT_IN_LFLOOR
:
2832 case BUILT_IN_LLFLOOR
:
2835 case BUILT_IN_IFLOORF
:
2836 case BUILT_IN_LFLOORF
:
2837 case BUILT_IN_LLFLOORF
:
2840 case BUILT_IN_IFLOORL
:
2841 case BUILT_IN_LFLOORL
:
2842 case BUILT_IN_LLFLOORL
:
2849 fntype
= build_function_type_list (TREE_TYPE (arg
),
2850 TREE_TYPE (arg
), NULL_TREE
);
2851 fallback_fndecl
= build_fn_decl (name
, fntype
);
2854 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2856 tmp
= expand_normal (exp
);
2858 /* Truncate the result of floating point optab to integer
2859 via expand_fix (). */
2860 target
= gen_reg_rtx (mode
);
2861 expand_fix (target
, tmp
, 0);
2866 /* Expand a call to one of the builtin math functions doing integer
2868 Return 0 if a normal call should be emitted rather than expanding the
2869 function in-line. EXP is the expression that is a call to the builtin
2870 function; if convenient, the result should be placed in TARGET. */
2873 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2875 convert_optab builtin_optab
;
2877 tree fndecl
= get_callee_fndecl (exp
);
2879 enum machine_mode mode
;
2880 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2882 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2885 arg
= CALL_EXPR_ARG (exp
, 0);
2887 switch (DECL_FUNCTION_CODE (fndecl
))
2889 CASE_FLT_FN (BUILT_IN_IRINT
):
2890 fallback_fn
= BUILT_IN_LRINT
;
2892 CASE_FLT_FN (BUILT_IN_LRINT
):
2893 CASE_FLT_FN (BUILT_IN_LLRINT
):
2894 builtin_optab
= lrint_optab
;
2897 CASE_FLT_FN (BUILT_IN_IROUND
):
2898 fallback_fn
= BUILT_IN_LROUND
;
2900 CASE_FLT_FN (BUILT_IN_LROUND
):
2901 CASE_FLT_FN (BUILT_IN_LLROUND
):
2902 builtin_optab
= lround_optab
;
2909 /* There's no easy way to detect the case we need to set EDOM. */
2910 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2913 /* Make a suitable register to place result in. */
2914 mode
= TYPE_MODE (TREE_TYPE (exp
));
2916 /* There's no easy way to detect the case we need to set EDOM. */
2917 if (!flag_errno_math
)
2919 target
= gen_reg_rtx (mode
);
2921 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2922 need to expand the argument again. This way, we will not perform
2923 side-effects more the once. */
2924 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2926 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2930 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2932 /* Output the entire sequence. */
2933 insns
= get_insns ();
2939 /* If we were unable to expand via the builtin, stop the sequence
2940 (without outputting the insns) and call to the library function
2941 with the stabilized argument list. */
2945 if (fallback_fn
!= BUILT_IN_NONE
)
2947 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2948 targets, (int) round (x) should never be transformed into
2949 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2950 a call to lround in the hope that the target provides at least some
2951 C99 functions. This should result in the best user experience for
2952 not full C99 targets. */
2953 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2956 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2957 fallback_fndecl
, 1, arg
);
2959 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2960 return convert_to_mode (mode
, target
, 0);
2963 target
= expand_call (exp
, target
, target
== const0_rtx
);
2968 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2969 a normal call should be emitted rather than expanding the function
2970 in-line. EXP is the expression that is a call to the builtin
2971 function; if convenient, the result should be placed in TARGET. */
2974 expand_builtin_powi (tree exp
, rtx target
)
2978 enum machine_mode mode
;
2979 enum machine_mode mode2
;
2981 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2984 arg0
= CALL_EXPR_ARG (exp
, 0);
2985 arg1
= CALL_EXPR_ARG (exp
, 1);
2986 mode
= TYPE_MODE (TREE_TYPE (exp
));
2988 /* Emit a libcall to libgcc. */
2990 /* Mode of the 2nd argument must match that of an int. */
2991 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2993 if (target
== NULL_RTX
)
2994 target
= gen_reg_rtx (mode
);
2996 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2997 if (GET_MODE (op0
) != mode
)
2998 op0
= convert_to_mode (mode
, op0
, 0);
2999 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3000 if (GET_MODE (op1
) != mode2
)
3001 op1
= convert_to_mode (mode2
, op1
, 0);
3003 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3004 target
, LCT_CONST
, mode
, 2,
3005 op0
, mode
, op1
, mode2
);
3010 /* Expand expression EXP which is a call to the strlen builtin. Return
3011 NULL_RTX if we failed the caller should emit a normal call, otherwise
3012 try to get the result in TARGET, if convenient. */
3015 expand_builtin_strlen (tree exp
, rtx target
,
3016 enum machine_mode target_mode
)
3018 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3022 struct expand_operand ops
[4];
3025 tree src
= CALL_EXPR_ARG (exp
, 0);
3026 rtx src_reg
, before_strlen
;
3027 enum machine_mode insn_mode
= target_mode
;
3028 enum insn_code icode
= CODE_FOR_nothing
;
3031 /* If the length can be computed at compile-time, return it. */
3032 len
= c_strlen (src
, 0);
3034 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3036 /* If the length can be computed at compile-time and is constant
3037 integer, but there are side-effects in src, evaluate
3038 src for side-effects, then return len.
3039 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3040 can be optimized into: i++; x = 3; */
3041 len
= c_strlen (src
, 1);
3042 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3044 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3045 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3048 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3050 /* If SRC is not a pointer type, don't do this operation inline. */
3054 /* Bail out if we can't compute strlen in the right mode. */
3055 while (insn_mode
!= VOIDmode
)
3057 icode
= optab_handler (strlen_optab
, insn_mode
);
3058 if (icode
!= CODE_FOR_nothing
)
3061 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3063 if (insn_mode
== VOIDmode
)
3066 /* Make a place to hold the source address. We will not expand
3067 the actual source until we are sure that the expansion will
3068 not fail -- there are trees that cannot be expanded twice. */
3069 src_reg
= gen_reg_rtx (Pmode
);
3071 /* Mark the beginning of the strlen sequence so we can emit the
3072 source operand later. */
3073 before_strlen
= get_last_insn ();
3075 create_output_operand (&ops
[0], target
, insn_mode
);
3076 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3077 create_integer_operand (&ops
[2], 0);
3078 create_integer_operand (&ops
[3], align
);
3079 if (!maybe_expand_insn (icode
, 4, ops
))
3082 /* Now that we are assured of success, expand the source. */
3084 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3087 #ifdef POINTERS_EXTEND_UNSIGNED
3088 if (GET_MODE (pat
) != Pmode
)
3089 pat
= convert_to_mode (Pmode
, pat
,
3090 POINTERS_EXTEND_UNSIGNED
);
3092 emit_move_insn (src_reg
, pat
);
3098 emit_insn_after (pat
, before_strlen
);
3100 emit_insn_before (pat
, get_insns ());
3102 /* Return the value in the proper mode for this function. */
3103 if (GET_MODE (ops
[0].value
) == target_mode
)
3104 target
= ops
[0].value
;
3105 else if (target
!= 0)
3106 convert_move (target
, ops
[0].value
, 0);
3108 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3114 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3115 bytes from constant string DATA + OFFSET and return it as target
3119 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3120 enum machine_mode mode
)
3122 const char *str
= (const char *) data
;
3124 gcc_assert (offset
>= 0
3125 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3126 <= strlen (str
) + 1));
3128 return c_readstr (str
+ offset
, mode
);
3131 /* Expand a call EXP to the memcpy builtin.
3132 Return NULL_RTX if we failed, the caller should emit a normal call,
3133 otherwise try to get the result in TARGET, if convenient (and in
3134 mode MODE if that's convenient). */
3137 expand_builtin_memcpy (tree exp
, rtx target
)
3139 if (!validate_arglist (exp
,
3140 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3144 tree dest
= CALL_EXPR_ARG (exp
, 0);
3145 tree src
= CALL_EXPR_ARG (exp
, 1);
3146 tree len
= CALL_EXPR_ARG (exp
, 2);
3147 const char *src_str
;
3148 unsigned int src_align
= get_pointer_alignment (src
);
3149 unsigned int dest_align
= get_pointer_alignment (dest
);
3150 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3151 HOST_WIDE_INT expected_size
= -1;
3152 unsigned int expected_align
= 0;
3154 /* If DEST is not a pointer type, call the normal function. */
3155 if (dest_align
== 0)
3158 /* If either SRC is not a pointer type, don't do this
3159 operation in-line. */
3163 if (currently_expanding_gimple_stmt
)
3164 stringop_block_profile (currently_expanding_gimple_stmt
,
3165 &expected_align
, &expected_size
);
3167 if (expected_align
< dest_align
)
3168 expected_align
= dest_align
;
3169 dest_mem
= get_memory_rtx (dest
, len
);
3170 set_mem_align (dest_mem
, dest_align
);
3171 len_rtx
= expand_normal (len
);
3172 src_str
= c_getstr (src
);
3174 /* If SRC is a string constant and block move would be done
3175 by pieces, we can avoid loading the string from memory
3176 and only stored the computed constants. */
3178 && CONST_INT_P (len_rtx
)
3179 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3180 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3181 CONST_CAST (char *, src_str
),
3184 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3185 builtin_memcpy_read_str
,
3186 CONST_CAST (char *, src_str
),
3187 dest_align
, false, 0);
3188 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3189 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3193 src_mem
= get_memory_rtx (src
, len
);
3194 set_mem_align (src_mem
, src_align
);
3196 /* Copy word part most expediently. */
3197 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3198 CALL_EXPR_TAILCALL (exp
)
3199 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3200 expected_align
, expected_size
);
3204 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3205 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3211 /* Expand a call EXP to the mempcpy builtin.
3212 Return NULL_RTX if we failed; the caller should emit a normal call,
3213 otherwise try to get the result in TARGET, if convenient (and in
3214 mode MODE if that's convenient). If ENDP is 0 return the
3215 destination pointer, if ENDP is 1 return the end pointer ala
3216 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3220 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3222 if (!validate_arglist (exp
,
3223 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3227 tree dest
= CALL_EXPR_ARG (exp
, 0);
3228 tree src
= CALL_EXPR_ARG (exp
, 1);
3229 tree len
= CALL_EXPR_ARG (exp
, 2);
3230 return expand_builtin_mempcpy_args (dest
, src
, len
,
3231 target
, mode
, /*endp=*/ 1);
3235 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3236 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3237 so that this can also be called without constructing an actual CALL_EXPR.
3238 The other arguments and return value are the same as for
3239 expand_builtin_mempcpy. */
3242 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3243 rtx target
, enum machine_mode mode
, int endp
)
3245 /* If return value is ignored, transform mempcpy into memcpy. */
3246 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3248 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3249 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3251 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3255 const char *src_str
;
3256 unsigned int src_align
= get_pointer_alignment (src
);
3257 unsigned int dest_align
= get_pointer_alignment (dest
);
3258 rtx dest_mem
, src_mem
, len_rtx
;
3260 /* If either SRC or DEST is not a pointer type, don't do this
3261 operation in-line. */
3262 if (dest_align
== 0 || src_align
== 0)
3265 /* If LEN is not constant, call the normal function. */
3266 if (! host_integerp (len
, 1))
3269 len_rtx
= expand_normal (len
);
3270 src_str
= c_getstr (src
);
3272 /* If SRC is a string constant and block move would be done
3273 by pieces, we can avoid loading the string from memory
3274 and only stored the computed constants. */
3276 && CONST_INT_P (len_rtx
)
3277 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3278 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3279 CONST_CAST (char *, src_str
),
3282 dest_mem
= get_memory_rtx (dest
, len
);
3283 set_mem_align (dest_mem
, dest_align
);
3284 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3285 builtin_memcpy_read_str
,
3286 CONST_CAST (char *, src_str
),
3287 dest_align
, false, endp
);
3288 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3289 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3293 if (CONST_INT_P (len_rtx
)
3294 && can_move_by_pieces (INTVAL (len_rtx
),
3295 MIN (dest_align
, src_align
)))
3297 dest_mem
= get_memory_rtx (dest
, len
);
3298 set_mem_align (dest_mem
, dest_align
);
3299 src_mem
= get_memory_rtx (src
, len
);
3300 set_mem_align (src_mem
, src_align
);
3301 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3302 MIN (dest_align
, src_align
), endp
);
3303 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3304 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3313 # define HAVE_movstr 0
3314 # define CODE_FOR_movstr CODE_FOR_nothing
3317 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3318 we failed, the caller should emit a normal call, otherwise try to
3319 get the result in TARGET, if convenient. If ENDP is 0 return the
3320 destination pointer, if ENDP is 1 return the end pointer ala
3321 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3325 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3327 struct expand_operand ops
[3];
3334 dest_mem
= get_memory_rtx (dest
, NULL
);
3335 src_mem
= get_memory_rtx (src
, NULL
);
3338 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3339 dest_mem
= replace_equiv_address (dest_mem
, target
);
3342 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3343 create_fixed_operand (&ops
[1], dest_mem
);
3344 create_fixed_operand (&ops
[2], src_mem
);
3345 expand_insn (CODE_FOR_movstr
, 3, ops
);
3347 if (endp
&& target
!= const0_rtx
)
3349 target
= ops
[0].value
;
3350 /* movstr is supposed to set end to the address of the NUL
3351 terminator. If the caller requested a mempcpy-like return value,
3355 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), target
), 1);
3356 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3362 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3363 NULL_RTX if we failed the caller should emit a normal call, otherwise
3364 try to get the result in TARGET, if convenient (and in mode MODE if that's
3368 expand_builtin_strcpy (tree exp
, rtx target
)
3370 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3372 tree dest
= CALL_EXPR_ARG (exp
, 0);
3373 tree src
= CALL_EXPR_ARG (exp
, 1);
3374 return expand_builtin_strcpy_args (dest
, src
, target
);
3379 /* Helper function to do the actual work for expand_builtin_strcpy. The
3380 arguments to the builtin_strcpy call DEST and SRC are broken out
3381 so that this can also be called without constructing an actual CALL_EXPR.
3382 The other arguments and return value are the same as for
3383 expand_builtin_strcpy. */
3386 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3388 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3391 /* Expand a call EXP to the stpcpy builtin.
3392 Return NULL_RTX if we failed the caller should emit a normal call,
3393 otherwise try to get the result in TARGET, if convenient (and in
3394 mode MODE if that's convenient). */
3397 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3400 location_t loc
= EXPR_LOCATION (exp
);
3402 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3405 dst
= CALL_EXPR_ARG (exp
, 0);
3406 src
= CALL_EXPR_ARG (exp
, 1);
3408 /* If return value is ignored, transform stpcpy into strcpy. */
3409 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3411 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3412 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3413 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3420 /* Ensure we get an actual string whose length can be evaluated at
3421 compile-time, not an expression containing a string. This is
3422 because the latter will potentially produce pessimized code
3423 when used to produce the return value. */
3424 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3425 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3427 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3428 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3429 target
, mode
, /*endp=*/2);
3434 if (TREE_CODE (len
) == INTEGER_CST
)
3436 rtx len_rtx
= expand_normal (len
);
3438 if (CONST_INT_P (len_rtx
))
3440 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3446 if (mode
!= VOIDmode
)
3447 target
= gen_reg_rtx (mode
);
3449 target
= gen_reg_rtx (GET_MODE (ret
));
3451 if (GET_MODE (target
) != GET_MODE (ret
))
3452 ret
= gen_lowpart (GET_MODE (target
), ret
);
3454 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3455 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3463 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3467 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3468 bytes from constant string DATA + OFFSET and return it as target
3472 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3473 enum machine_mode mode
)
3475 const char *str
= (const char *) data
;
3477 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3480 return c_readstr (str
+ offset
, mode
);
3483 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3484 NULL_RTX if we failed the caller should emit a normal call. */
3487 expand_builtin_strncpy (tree exp
, rtx target
)
3489 location_t loc
= EXPR_LOCATION (exp
);
3491 if (validate_arglist (exp
,
3492 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3494 tree dest
= CALL_EXPR_ARG (exp
, 0);
3495 tree src
= CALL_EXPR_ARG (exp
, 1);
3496 tree len
= CALL_EXPR_ARG (exp
, 2);
3497 tree slen
= c_strlen (src
, 1);
3499 /* We must be passed a constant len and src parameter. */
3500 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3503 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3505 /* We're required to pad with trailing zeros if the requested
3506 len is greater than strlen(s2)+1. In that case try to
3507 use store_by_pieces, if it fails, punt. */
3508 if (tree_int_cst_lt (slen
, len
))
3510 unsigned int dest_align
= get_pointer_alignment (dest
);
3511 const char *p
= c_getstr (src
);
3514 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3515 || !can_store_by_pieces (tree_low_cst (len
, 1),
3516 builtin_strncpy_read_str
,
3517 CONST_CAST (char *, p
),
3521 dest_mem
= get_memory_rtx (dest
, len
);
3522 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3523 builtin_strncpy_read_str
,
3524 CONST_CAST (char *, p
), dest_align
, false, 0);
3525 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3526 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3533 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3534 bytes from constant string DATA + OFFSET and return it as target
3538 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3539 enum machine_mode mode
)
3541 const char *c
= (const char *) data
;
3542 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3544 memset (p
, *c
, GET_MODE_SIZE (mode
));
3546 return c_readstr (p
, mode
);
3549 /* Callback routine for store_by_pieces. Return the RTL of a register
3550 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3551 char value given in the RTL register data. For example, if mode is
3552 4 bytes wide, return the RTL for 0x01010101*data. */
3555 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3556 enum machine_mode mode
)
3562 size
= GET_MODE_SIZE (mode
);
3566 p
= XALLOCAVEC (char, size
);
3567 memset (p
, 1, size
);
3568 coeff
= c_readstr (p
, mode
);
3570 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3571 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3572 return force_reg (mode
, target
);
3575 /* Expand expression EXP, which is a call to the memset builtin. Return
3576 NULL_RTX if we failed the caller should emit a normal call, otherwise
3577 try to get the result in TARGET, if convenient (and in mode MODE if that's
3581 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3583 if (!validate_arglist (exp
,
3584 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3588 tree dest
= CALL_EXPR_ARG (exp
, 0);
3589 tree val
= CALL_EXPR_ARG (exp
, 1);
3590 tree len
= CALL_EXPR_ARG (exp
, 2);
3591 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3595 /* Helper function to do the actual work for expand_builtin_memset. The
3596 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3597 so that this can also be called without constructing an actual CALL_EXPR.
3598 The other arguments and return value are the same as for
3599 expand_builtin_memset. */
3602 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3603 rtx target
, enum machine_mode mode
, tree orig_exp
)
3606 enum built_in_function fcode
;
3607 enum machine_mode val_mode
;
3609 unsigned int dest_align
;
3610 rtx dest_mem
, dest_addr
, len_rtx
;
3611 HOST_WIDE_INT expected_size
= -1;
3612 unsigned int expected_align
= 0;
3614 dest_align
= get_pointer_alignment (dest
);
3616 /* If DEST is not a pointer type, don't do this operation in-line. */
3617 if (dest_align
== 0)
3620 if (currently_expanding_gimple_stmt
)
3621 stringop_block_profile (currently_expanding_gimple_stmt
,
3622 &expected_align
, &expected_size
);
3624 if (expected_align
< dest_align
)
3625 expected_align
= dest_align
;
3627 /* If the LEN parameter is zero, return DEST. */
3628 if (integer_zerop (len
))
3630 /* Evaluate and ignore VAL in case it has side-effects. */
3631 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3632 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3635 /* Stabilize the arguments in case we fail. */
3636 dest
= builtin_save_expr (dest
);
3637 val
= builtin_save_expr (val
);
3638 len
= builtin_save_expr (len
);
3640 len_rtx
= expand_normal (len
);
3641 dest_mem
= get_memory_rtx (dest
, len
);
3642 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3644 if (TREE_CODE (val
) != INTEGER_CST
)
3648 val_rtx
= expand_normal (val
);
3649 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3651 /* Assume that we can memset by pieces if we can store
3652 * the coefficients by pieces (in the required modes).
3653 * We can't pass builtin_memset_gen_str as that emits RTL. */
3655 if (host_integerp (len
, 1)
3656 && can_store_by_pieces (tree_low_cst (len
, 1),
3657 builtin_memset_read_str
, &c
, dest_align
,
3660 val_rtx
= force_reg (val_mode
, val_rtx
);
3661 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3662 builtin_memset_gen_str
, val_rtx
, dest_align
,
3665 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3666 dest_align
, expected_align
,
3670 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3671 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3675 if (target_char_cast (val
, &c
))
3680 if (host_integerp (len
, 1)
3681 && can_store_by_pieces (tree_low_cst (len
, 1),
3682 builtin_memset_read_str
, &c
, dest_align
,
3684 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3685 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3686 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3687 gen_int_mode (c
, val_mode
),
3688 dest_align
, expected_align
,
3692 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3693 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3697 set_mem_align (dest_mem
, dest_align
);
3698 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3699 CALL_EXPR_TAILCALL (orig_exp
)
3700 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3701 expected_align
, expected_size
);
3705 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3706 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3712 fndecl
= get_callee_fndecl (orig_exp
);
3713 fcode
= DECL_FUNCTION_CODE (fndecl
);
3714 if (fcode
== BUILT_IN_MEMSET
)
3715 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3717 else if (fcode
== BUILT_IN_BZERO
)
3718 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3722 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3723 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3724 return expand_call (fn
, target
, target
== const0_rtx
);
3727 /* Expand expression EXP, which is a call to the bzero builtin. Return
3728 NULL_RTX if we failed the caller should emit a normal call. */
3731 expand_builtin_bzero (tree exp
)
3734 location_t loc
= EXPR_LOCATION (exp
);
3736 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3739 dest
= CALL_EXPR_ARG (exp
, 0);
3740 size
= CALL_EXPR_ARG (exp
, 1);
3742 /* New argument list transforming bzero(ptr x, int y) to
3743 memset(ptr x, int 0, size_t y). This is done this way
3744 so that if it isn't expanded inline, we fallback to
3745 calling bzero instead of memset. */
3747 return expand_builtin_memset_args (dest
, integer_zero_node
,
3748 fold_convert_loc (loc
,
3749 size_type_node
, size
),
3750 const0_rtx
, VOIDmode
, exp
);
3753 /* Expand expression EXP, which is a call to the memcmp built-in function.
3754 Return NULL_RTX if we failed and the caller should emit a normal call,
3755 otherwise try to get the result in TARGET, if convenient (and in mode
3756 MODE, if that's convenient). */
3759 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3760 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3762 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3764 if (!validate_arglist (exp
,
3765 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3768 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3769 implementing memcmp because it will stop if it encounters two
3771 #if defined HAVE_cmpmemsi
3773 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3776 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3777 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3778 tree len
= CALL_EXPR_ARG (exp
, 2);
3780 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3781 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3782 enum machine_mode insn_mode
;
3785 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3789 /* If we don't have POINTER_TYPE, call the function. */
3790 if (arg1_align
== 0 || arg2_align
== 0)
3793 /* Make a place to write the result of the instruction. */
3796 && REG_P (result
) && GET_MODE (result
) == insn_mode
3797 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3798 result
= gen_reg_rtx (insn_mode
);
3800 arg1_rtx
= get_memory_rtx (arg1
, len
);
3801 arg2_rtx
= get_memory_rtx (arg2
, len
);
3802 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3804 /* Set MEM_SIZE as appropriate. */
3805 if (CONST_INT_P (arg3_rtx
))
3807 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3808 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3812 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3813 GEN_INT (MIN (arg1_align
, arg2_align
)));
3820 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3821 TYPE_MODE (integer_type_node
), 3,
3822 XEXP (arg1_rtx
, 0), Pmode
,
3823 XEXP (arg2_rtx
, 0), Pmode
,
3824 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3825 TYPE_UNSIGNED (sizetype
)),
3826 TYPE_MODE (sizetype
));
3828 /* Return the value in the proper mode for this function. */
3829 mode
= TYPE_MODE (TREE_TYPE (exp
));
3830 if (GET_MODE (result
) == mode
)
3832 else if (target
!= 0)
3834 convert_move (target
, result
, 0);
3838 return convert_to_mode (mode
, result
, 0);
3840 #endif /* HAVE_cmpmemsi. */
3845 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3846 if we failed the caller should emit a normal call, otherwise try to get
3847 the result in TARGET, if convenient. */
3850 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3852 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3855 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3856 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3857 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3859 rtx arg1_rtx
, arg2_rtx
;
3860 rtx result
, insn
= NULL_RTX
;
3862 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3863 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3865 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3866 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3868 /* If we don't have POINTER_TYPE, call the function. */
3869 if (arg1_align
== 0 || arg2_align
== 0)
3872 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3873 arg1
= builtin_save_expr (arg1
);
3874 arg2
= builtin_save_expr (arg2
);
3876 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3877 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3879 #ifdef HAVE_cmpstrsi
3880 /* Try to call cmpstrsi. */
3883 enum machine_mode insn_mode
3884 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3886 /* Make a place to write the result of the instruction. */
3889 && REG_P (result
) && GET_MODE (result
) == insn_mode
3890 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3891 result
= gen_reg_rtx (insn_mode
);
3893 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3894 GEN_INT (MIN (arg1_align
, arg2_align
)));
3897 #ifdef HAVE_cmpstrnsi
3898 /* Try to determine at least one length and call cmpstrnsi. */
3899 if (!insn
&& HAVE_cmpstrnsi
)
3904 enum machine_mode insn_mode
3905 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3906 tree len1
= c_strlen (arg1
, 1);
3907 tree len2
= c_strlen (arg2
, 1);
3910 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3912 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3914 /* If we don't have a constant length for the first, use the length
3915 of the second, if we know it. We don't require a constant for
3916 this case; some cost analysis could be done if both are available
3917 but neither is constant. For now, assume they're equally cheap,
3918 unless one has side effects. If both strings have constant lengths,
3925 else if (TREE_SIDE_EFFECTS (len1
))
3927 else if (TREE_SIDE_EFFECTS (len2
))
3929 else if (TREE_CODE (len1
) != INTEGER_CST
)
3931 else if (TREE_CODE (len2
) != INTEGER_CST
)
3933 else if (tree_int_cst_lt (len1
, len2
))
3938 /* If both arguments have side effects, we cannot optimize. */
3939 if (!len
|| TREE_SIDE_EFFECTS (len
))
3942 arg3_rtx
= expand_normal (len
);
3944 /* Make a place to write the result of the instruction. */
3947 && REG_P (result
) && GET_MODE (result
) == insn_mode
3948 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3949 result
= gen_reg_rtx (insn_mode
);
3951 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3952 GEN_INT (MIN (arg1_align
, arg2_align
)));
3958 enum machine_mode mode
;
3961 /* Return the value in the proper mode for this function. */
3962 mode
= TYPE_MODE (TREE_TYPE (exp
));
3963 if (GET_MODE (result
) == mode
)
3966 return convert_to_mode (mode
, result
, 0);
3967 convert_move (target
, result
, 0);
3971 /* Expand the library call ourselves using a stabilized argument
3972 list to avoid re-evaluating the function's arguments twice. */
3973 #ifdef HAVE_cmpstrnsi
3976 fndecl
= get_callee_fndecl (exp
);
3977 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3978 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3979 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3980 return expand_call (fn
, target
, target
== const0_rtx
);
3986 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3987 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3988 the result in TARGET, if convenient. */
3991 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3992 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3994 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3996 if (!validate_arglist (exp
,
3997 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4000 /* If c_strlen can determine an expression for one of the string
4001 lengths, and it doesn't have side effects, then emit cmpstrnsi
4002 using length MIN(strlen(string)+1, arg3). */
4003 #ifdef HAVE_cmpstrnsi
4006 tree len
, len1
, len2
;
4007 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4010 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4011 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4012 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4014 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4015 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4016 enum machine_mode insn_mode
4017 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4019 len1
= c_strlen (arg1
, 1);
4020 len2
= c_strlen (arg2
, 1);
4023 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4025 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4027 /* If we don't have a constant length for the first, use the length
4028 of the second, if we know it. We don't require a constant for
4029 this case; some cost analysis could be done if both are available
4030 but neither is constant. For now, assume they're equally cheap,
4031 unless one has side effects. If both strings have constant lengths,
4038 else if (TREE_SIDE_EFFECTS (len1
))
4040 else if (TREE_SIDE_EFFECTS (len2
))
4042 else if (TREE_CODE (len1
) != INTEGER_CST
)
4044 else if (TREE_CODE (len2
) != INTEGER_CST
)
4046 else if (tree_int_cst_lt (len1
, len2
))
4051 /* If both arguments have side effects, we cannot optimize. */
4052 if (!len
|| TREE_SIDE_EFFECTS (len
))
4055 /* The actual new length parameter is MIN(len,arg3). */
4056 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4057 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4059 /* If we don't have POINTER_TYPE, call the function. */
4060 if (arg1_align
== 0 || arg2_align
== 0)
4063 /* Make a place to write the result of the instruction. */
4066 && REG_P (result
) && GET_MODE (result
) == insn_mode
4067 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4068 result
= gen_reg_rtx (insn_mode
);
4070 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4071 arg1
= builtin_save_expr (arg1
);
4072 arg2
= builtin_save_expr (arg2
);
4073 len
= builtin_save_expr (len
);
4075 arg1_rtx
= get_memory_rtx (arg1
, len
);
4076 arg2_rtx
= get_memory_rtx (arg2
, len
);
4077 arg3_rtx
= expand_normal (len
);
4078 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4079 GEN_INT (MIN (arg1_align
, arg2_align
)));
4084 /* Return the value in the proper mode for this function. */
4085 mode
= TYPE_MODE (TREE_TYPE (exp
));
4086 if (GET_MODE (result
) == mode
)
4089 return convert_to_mode (mode
, result
, 0);
4090 convert_move (target
, result
, 0);
4094 /* Expand the library call ourselves using a stabilized argument
4095 list to avoid re-evaluating the function's arguments twice. */
4096 fndecl
= get_callee_fndecl (exp
);
4097 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4099 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4100 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4101 return expand_call (fn
, target
, target
== const0_rtx
);
4107 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4108 if that's convenient. */
4111 expand_builtin_saveregs (void)
4115 /* Don't do __builtin_saveregs more than once in a function.
4116 Save the result of the first call and reuse it. */
4117 if (saveregs_value
!= 0)
4118 return saveregs_value
;
4120 /* When this function is called, it means that registers must be
4121 saved on entry to this function. So we migrate the call to the
4122 first insn of this function. */
4126 /* Do whatever the machine needs done in this case. */
4127 val
= targetm
.calls
.expand_builtin_saveregs ();
4132 saveregs_value
= val
;
4134 /* Put the insns after the NOTE that starts the function. If this
4135 is inside a start_sequence, make the outer-level insn chain current, so
4136 the code is placed at the start of the function. */
4137 push_topmost_sequence ();
4138 emit_insn_after (seq
, entry_of_function ());
4139 pop_topmost_sequence ();
4144 /* Expand a call to __builtin_next_arg. */
4147 expand_builtin_next_arg (void)
4149 /* Checking arguments is already done in fold_builtin_next_arg
4150 that must be called before this function. */
4151 return expand_binop (ptr_mode
, add_optab
,
4152 crtl
->args
.internal_arg_pointer
,
4153 crtl
->args
.arg_offset_rtx
,
4154 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4157 /* Make it easier for the backends by protecting the valist argument
4158 from multiple evaluations. */
4161 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4163 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4165 /* The current way of determining the type of valist is completely
4166 bogus. We should have the information on the va builtin instead. */
4168 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4170 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4172 if (TREE_SIDE_EFFECTS (valist
))
4173 valist
= save_expr (valist
);
4175 /* For this case, the backends will be expecting a pointer to
4176 vatype, but it's possible we've actually been given an array
4177 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4179 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4181 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4182 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4187 tree pt
= build_pointer_type (vatype
);
4191 if (! TREE_SIDE_EFFECTS (valist
))
4194 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4195 TREE_SIDE_EFFECTS (valist
) = 1;
4198 if (TREE_SIDE_EFFECTS (valist
))
4199 valist
= save_expr (valist
);
4200 valist
= fold_build2_loc (loc
, MEM_REF
,
4201 vatype
, valist
, build_int_cst (pt
, 0));
4207 /* The "standard" definition of va_list is void*. */
4210 std_build_builtin_va_list (void)
4212 return ptr_type_node
;
4215 /* The "standard" abi va_list is va_list_type_node. */
4218 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4220 return va_list_type_node
;
4223 /* The "standard" type of va_list is va_list_type_node. */
4226 std_canonical_va_list_type (tree type
)
4230 if (INDIRECT_REF_P (type
))
4231 type
= TREE_TYPE (type
);
4232 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE(type
)))
4233 type
= TREE_TYPE (type
);
4234 wtype
= va_list_type_node
;
4236 /* Treat structure va_list types. */
4237 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4238 htype
= TREE_TYPE (htype
);
4239 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4241 /* If va_list is an array type, the argument may have decayed
4242 to a pointer type, e.g. by being passed to another function.
4243 In that case, unwrap both types so that we can compare the
4244 underlying records. */
4245 if (TREE_CODE (htype
) == ARRAY_TYPE
4246 || POINTER_TYPE_P (htype
))
4248 wtype
= TREE_TYPE (wtype
);
4249 htype
= TREE_TYPE (htype
);
4252 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4253 return va_list_type_node
;
4258 /* The "standard" implementation of va_start: just assign `nextarg' to
4262 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4264 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4265 convert_move (va_r
, nextarg
, 0);
4268 /* Expand EXP, a call to __builtin_va_start. */
4271 expand_builtin_va_start (tree exp
)
4275 location_t loc
= EXPR_LOCATION (exp
);
4277 if (call_expr_nargs (exp
) < 2)
4279 error_at (loc
, "too few arguments to function %<va_start%>");
4283 if (fold_builtin_next_arg (exp
, true))
4286 nextarg
= expand_builtin_next_arg ();
4287 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4289 if (targetm
.expand_builtin_va_start
)
4290 targetm
.expand_builtin_va_start (valist
, nextarg
);
4292 std_expand_builtin_va_start (valist
, nextarg
);
4297 /* The "standard" implementation of va_arg: read the value from the
4298 current (padded) address and increment by the (padded) size. */
4301 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
4304 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4305 unsigned HOST_WIDE_INT align
, boundary
;
4308 #ifdef ARGS_GROW_DOWNWARD
4309 /* All of the alignment and movement below is for args-grow-up machines.
4310 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4311 implement their own specialized gimplify_va_arg_expr routines. */
4315 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4317 type
= build_pointer_type (type
);
4319 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4320 boundary
= targetm
.calls
.function_arg_boundary (TYPE_MODE (type
), type
);
4322 /* When we align parameter on stack for caller, if the parameter
4323 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4324 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4325 here with caller. */
4326 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
4327 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
4329 boundary
/= BITS_PER_UNIT
;
4331 /* Hoist the valist value into a temporary for the moment. */
4332 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4334 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4335 requires greater alignment, we must perform dynamic alignment. */
4336 if (boundary
> align
4337 && !integer_zerop (TYPE_SIZE (type
)))
4339 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4340 fold_build_pointer_plus_hwi (valist_tmp
, boundary
- 1));
4341 gimplify_and_add (t
, pre_p
);
4343 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4344 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (valist
),
4346 build_int_cst (TREE_TYPE (valist
), -boundary
)));
4347 gimplify_and_add (t
, pre_p
);
4352 /* If the actual alignment is less than the alignment of the type,
4353 adjust the type accordingly so that we don't assume strict alignment
4354 when dereferencing the pointer. */
4355 boundary
*= BITS_PER_UNIT
;
4356 if (boundary
< TYPE_ALIGN (type
))
4358 type
= build_variant_type_copy (type
);
4359 TYPE_ALIGN (type
) = boundary
;
4362 /* Compute the rounded size of the type. */
4363 type_size
= size_in_bytes (type
);
4364 rounded_size
= round_up (type_size
, align
);
4366 /* Reduce rounded_size so it's sharable with the postqueue. */
4367 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4371 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4373 /* Small args are padded downward. */
4374 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
4375 rounded_size
, size_int (align
));
4376 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4377 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4378 addr
= fold_build_pointer_plus (addr
, t
);
4381 /* Compute new value for AP. */
4382 t
= fold_build_pointer_plus (valist_tmp
, rounded_size
);
4383 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4384 gimplify_and_add (t
, pre_p
);
4386 addr
= fold_convert (build_pointer_type (type
), addr
);
4389 addr
= build_va_arg_indirect_ref (addr
);
4391 return build_va_arg_indirect_ref (addr
);
4394 /* Build an indirect-ref expression over the given TREE, which represents a
4395 piece of a va_arg() expansion. */
4397 build_va_arg_indirect_ref (tree addr
)
4399 addr
= build_simple_mem_ref_loc (EXPR_LOCATION (addr
), addr
);
4401 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4407 /* Return a dummy expression of type TYPE in order to keep going after an
4411 dummy_object (tree type
)
4413 tree t
= build_int_cst (build_pointer_type (type
), 0);
4414 return build2 (MEM_REF
, type
, t
, t
);
4417 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4418 builtin function, but a very special sort of operator. */
4420 enum gimplify_status
4421 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4423 tree promoted_type
, have_va_type
;
4424 tree valist
= TREE_OPERAND (*expr_p
, 0);
4425 tree type
= TREE_TYPE (*expr_p
);
4427 location_t loc
= EXPR_LOCATION (*expr_p
);
4429 /* Verify that valist is of the proper type. */
4430 have_va_type
= TREE_TYPE (valist
);
4431 if (have_va_type
== error_mark_node
)
4433 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
4435 if (have_va_type
== NULL_TREE
)
4437 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
4441 /* Generate a diagnostic for requesting data of a type that cannot
4442 be passed through `...' due to type promotion at the call site. */
4443 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4446 static bool gave_help
;
4449 /* Unfortunately, this is merely undefined, rather than a constraint
4450 violation, so we cannot make this an error. If this call is never
4451 executed, the program is still strictly conforming. */
4452 warned
= warning_at (loc
, 0,
4453 "%qT is promoted to %qT when passed through %<...%>",
4454 type
, promoted_type
);
4455 if (!gave_help
&& warned
)
4458 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
4459 promoted_type
, type
);
4462 /* We can, however, treat "undefined" any way we please.
4463 Call abort to encourage the user to fix the program. */
4465 inform (loc
, "if this code is reached, the program will abort");
4466 /* Before the abort, allow the evaluation of the va_list
4467 expression to exit or longjmp. */
4468 gimplify_and_add (valist
, pre_p
);
4469 t
= build_call_expr_loc (loc
,
4470 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
4471 gimplify_and_add (t
, pre_p
);
4473 /* This is dead code, but go ahead and finish so that the
4474 mode of the result comes out right. */
4475 *expr_p
= dummy_object (type
);
4480 /* Make it easier for the backends by protecting the valist argument
4481 from multiple evaluations. */
4482 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
)
4484 /* For this case, the backends will be expecting a pointer to
4485 TREE_TYPE (abi), but it's possible we've
4486 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4488 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4490 tree p1
= build_pointer_type (TREE_TYPE (have_va_type
));
4491 valist
= fold_convert_loc (loc
, p1
,
4492 build_fold_addr_expr_loc (loc
, valist
));
4495 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4498 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4500 if (!targetm
.gimplify_va_arg_expr
)
4501 /* FIXME: Once most targets are converted we should merely
4502 assert this is non-null. */
4505 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4510 /* Expand EXP, a call to __builtin_va_end. */
4513 expand_builtin_va_end (tree exp
)
4515 tree valist
= CALL_EXPR_ARG (exp
, 0);
4517 /* Evaluate for side effects, if needed. I hate macros that don't
4519 if (TREE_SIDE_EFFECTS (valist
))
4520 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4525 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4526 builtin rather than just as an assignment in stdarg.h because of the
4527 nastiness of array-type va_list types. */
4530 expand_builtin_va_copy (tree exp
)
4533 location_t loc
= EXPR_LOCATION (exp
);
4535 dst
= CALL_EXPR_ARG (exp
, 0);
4536 src
= CALL_EXPR_ARG (exp
, 1);
4538 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4539 src
= stabilize_va_list_loc (loc
, src
, 0);
4541 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4543 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4545 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4546 TREE_SIDE_EFFECTS (t
) = 1;
4547 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4551 rtx dstb
, srcb
, size
;
4553 /* Evaluate to pointers. */
4554 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4555 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4556 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4557 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4559 dstb
= convert_memory_address (Pmode
, dstb
);
4560 srcb
= convert_memory_address (Pmode
, srcb
);
4562 /* "Dereference" to BLKmode memories. */
4563 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4564 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4565 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4566 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4567 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4568 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4571 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4577 /* Expand a call to one of the builtin functions __builtin_frame_address or
4578 __builtin_return_address. */
4581 expand_builtin_frame_address (tree fndecl
, tree exp
)
4583 /* The argument must be a nonnegative integer constant.
4584 It counts the number of frames to scan up the stack.
4585 The value is the return address saved in that frame. */
4586 if (call_expr_nargs (exp
) == 0)
4587 /* Warning about missing arg was already issued. */
4589 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4591 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4592 error ("invalid argument to %<__builtin_frame_address%>");
4594 error ("invalid argument to %<__builtin_return_address%>");
4600 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4601 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4603 /* Some ports cannot access arbitrary stack frames. */
4606 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4607 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4609 warning (0, "unsupported argument to %<__builtin_return_address%>");
4613 /* For __builtin_frame_address, return what we've got. */
4614 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4618 && ! CONSTANT_P (tem
))
4619 tem
= copy_addr_to_reg (tem
);
4624 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4625 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4626 is the same as for allocate_dynamic_stack_space. */
4629 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4635 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4636 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4638 /* Emit normal call if we use mudflap. */
4643 = (alloca_with_align
4644 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4645 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4650 /* Compute the argument. */
4651 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4653 /* Compute the alignment. */
4654 align
= (alloca_with_align
4655 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4656 : BIGGEST_ALIGNMENT
);
4658 /* Allocate the desired space. */
4659 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4660 result
= convert_memory_address (ptr_mode
, result
);
4665 /* Expand a call to bswap builtin in EXP.
4666 Return NULL_RTX if a normal call should be emitted rather than expanding the
4667 function in-line. If convenient, the result should be placed in TARGET.
4668 SUBTARGET may be used as the target for computing one of EXP's operands. */
4671 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4677 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4680 arg
= CALL_EXPR_ARG (exp
, 0);
4681 op0
= expand_expr (arg
,
4682 subtarget
&& GET_MODE (subtarget
) == target_mode
4683 ? subtarget
: NULL_RTX
,
4684 target_mode
, EXPAND_NORMAL
);
4685 if (GET_MODE (op0
) != target_mode
)
4686 op0
= convert_to_mode (target_mode
, op0
, 1);
4688 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4690 gcc_assert (target
);
4692 return convert_to_mode (target_mode
, target
, 1);
4695 /* Expand a call to a unary builtin in EXP.
4696 Return NULL_RTX if a normal call should be emitted rather than expanding the
4697 function in-line. If convenient, the result should be placed in TARGET.
4698 SUBTARGET may be used as the target for computing one of EXP's operands. */
4701 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4702 rtx subtarget
, optab op_optab
)
4706 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4709 /* Compute the argument. */
4710 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4712 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4713 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4714 VOIDmode
, EXPAND_NORMAL
);
4715 /* Compute op, into TARGET if possible.
4716 Set TARGET to wherever the result comes back. */
4717 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4718 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4719 gcc_assert (target
);
4721 return convert_to_mode (target_mode
, target
, 0);
4724 /* Expand a call to __builtin_expect. We just return our argument
4725 as the builtin_expect semantic should've been already executed by
4726 tree branch prediction pass. */
4729 expand_builtin_expect (tree exp
, rtx target
)
4733 if (call_expr_nargs (exp
) < 2)
4735 arg
= CALL_EXPR_ARG (exp
, 0);
4737 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4738 /* When guessing was done, the hints should be already stripped away. */
4739 gcc_assert (!flag_guess_branch_prob
4740 || optimize
== 0 || seen_error ());
4744 /* Expand a call to __builtin_assume_aligned. We just return our first
4745 argument as the builtin_assume_aligned semantic should've been already
4749 expand_builtin_assume_aligned (tree exp
, rtx target
)
4751 if (call_expr_nargs (exp
) < 2)
4753 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4755 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4756 && (call_expr_nargs (exp
) < 3
4757 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4762 expand_builtin_trap (void)
4766 emit_insn (gen_trap ());
4769 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4784 /* Expand EXP, a call to fabs, fabsf or fabsl.
4785 Return NULL_RTX if a normal call should be emitted rather than expanding
4786 the function inline. If convenient, the result should be placed
4787 in TARGET. SUBTARGET may be used as the target for computing
4791 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4793 enum machine_mode mode
;
4797 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4800 arg
= CALL_EXPR_ARG (exp
, 0);
4801 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4802 mode
= TYPE_MODE (TREE_TYPE (arg
));
4803 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4804 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4807 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4808 Return NULL is a normal call should be emitted rather than expanding the
4809 function inline. If convenient, the result should be placed in TARGET.
4810 SUBTARGET may be used as the target for computing the operand. */
4813 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4818 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4821 arg
= CALL_EXPR_ARG (exp
, 0);
4822 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4824 arg
= CALL_EXPR_ARG (exp
, 1);
4825 op1
= expand_normal (arg
);
4827 return expand_copysign (op0
, op1
, target
);
4830 /* Create a new constant string literal and return a char* pointer to it.
4831 The STRING_CST value is the LEN characters at STR. */
4833 build_string_literal (int len
, const char *str
)
4835 tree t
, elem
, index
, type
;
4837 t
= build_string (len
, str
);
4838 elem
= build_type_variant (char_type_node
, 1, 0);
4839 index
= build_index_type (size_int (len
- 1));
4840 type
= build_array_type (elem
, index
);
4841 TREE_TYPE (t
) = type
;
4842 TREE_CONSTANT (t
) = 1;
4843 TREE_READONLY (t
) = 1;
4844 TREE_STATIC (t
) = 1;
4846 type
= build_pointer_type (elem
);
4847 t
= build1 (ADDR_EXPR
, type
,
4848 build4 (ARRAY_REF
, elem
,
4849 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4853 /* Expand a call to __builtin___clear_cache. */
4856 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4858 #ifndef HAVE_clear_cache
4859 #ifdef CLEAR_INSN_CACHE
4860 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4861 does something. Just do the default expansion to a call to
4865 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4866 does nothing. There is no need to call it. Do nothing. */
4868 #endif /* CLEAR_INSN_CACHE */
4870 /* We have a "clear_cache" insn, and it will handle everything. */
4872 rtx begin_rtx
, end_rtx
;
4874 /* We must not expand to a library call. If we did, any
4875 fallback library function in libgcc that might contain a call to
4876 __builtin___clear_cache() would recurse infinitely. */
4877 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4879 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4883 if (HAVE_clear_cache
)
4885 struct expand_operand ops
[2];
4887 begin
= CALL_EXPR_ARG (exp
, 0);
4888 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4890 end
= CALL_EXPR_ARG (exp
, 1);
4891 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4893 create_address_operand (&ops
[0], begin_rtx
);
4894 create_address_operand (&ops
[1], end_rtx
);
4895 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4899 #endif /* HAVE_clear_cache */
4902 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4905 round_trampoline_addr (rtx tramp
)
4907 rtx temp
, addend
, mask
;
4909 /* If we don't need too much alignment, we'll have been guaranteed
4910 proper alignment by get_trampoline_type. */
4911 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4914 /* Round address up to desired boundary. */
4915 temp
= gen_reg_rtx (Pmode
);
4916 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
4917 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
4919 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4920 temp
, 0, OPTAB_LIB_WIDEN
);
4921 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4922 temp
, 0, OPTAB_LIB_WIDEN
);
4928 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4930 tree t_tramp
, t_func
, t_chain
;
4931 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4933 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4934 POINTER_TYPE
, VOID_TYPE
))
4937 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4938 t_func
= CALL_EXPR_ARG (exp
, 1);
4939 t_chain
= CALL_EXPR_ARG (exp
, 2);
4941 r_tramp
= expand_normal (t_tramp
);
4942 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4943 MEM_NOTRAP_P (m_tramp
) = 1;
4945 /* If ONSTACK, the TRAMP argument should be the address of a field
4946 within the local function's FRAME decl. Either way, let's see if
4947 we can fill in the MEM_ATTRs for this memory. */
4948 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4949 set_mem_attributes_minus_bitpos (m_tramp
, TREE_OPERAND (t_tramp
, 0),
4952 /* Creator of a heap trampoline is responsible for making sure the
4953 address is aligned to at least STACK_BOUNDARY. Normally malloc
4954 will ensure this anyhow. */
4955 tmp
= round_trampoline_addr (r_tramp
);
4958 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4959 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4960 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4963 /* The FUNC argument should be the address of the nested function.
4964 Extract the actual function decl to pass to the hook. */
4965 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4966 t_func
= TREE_OPERAND (t_func
, 0);
4967 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4969 r_chain
= expand_normal (t_chain
);
4971 /* Generate insns to initialize the trampoline. */
4972 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4976 trampolines_created
= 1;
4978 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4979 "trampoline generated for nested function %qD", t_func
);
4986 expand_builtin_adjust_trampoline (tree exp
)
4990 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4993 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4994 tramp
= round_trampoline_addr (tramp
);
4995 if (targetm
.calls
.trampoline_adjust_address
)
4996 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5001 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5002 function. The function first checks whether the back end provides
5003 an insn to implement signbit for the respective mode. If not, it
5004 checks whether the floating point format of the value is such that
5005 the sign bit can be extracted. If that is not the case, the
5006 function returns NULL_RTX to indicate that a normal call should be
5007 emitted rather than expanding the function in-line. EXP is the
5008 expression that is a call to the builtin function; if convenient,
5009 the result should be placed in TARGET. */
5011 expand_builtin_signbit (tree exp
, rtx target
)
5013 const struct real_format
*fmt
;
5014 enum machine_mode fmode
, imode
, rmode
;
5017 enum insn_code icode
;
5019 location_t loc
= EXPR_LOCATION (exp
);
5021 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5024 arg
= CALL_EXPR_ARG (exp
, 0);
5025 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5026 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5027 fmt
= REAL_MODE_FORMAT (fmode
);
5029 arg
= builtin_save_expr (arg
);
5031 /* Expand the argument yielding a RTX expression. */
5032 temp
= expand_normal (arg
);
5034 /* Check if the back end provides an insn that handles signbit for the
5036 icode
= optab_handler (signbit_optab
, fmode
);
5037 if (icode
!= CODE_FOR_nothing
)
5039 rtx last
= get_last_insn ();
5040 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5041 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5043 delete_insns_since (last
);
5046 /* For floating point formats without a sign bit, implement signbit
5048 bitpos
= fmt
->signbit_ro
;
5051 /* But we can't do this if the format supports signed zero. */
5052 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5055 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5056 build_real (TREE_TYPE (arg
), dconst0
));
5057 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5060 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5062 imode
= int_mode_for_mode (fmode
);
5063 if (imode
== BLKmode
)
5065 temp
= gen_lowpart (imode
, temp
);
5070 /* Handle targets with different FP word orders. */
5071 if (FLOAT_WORDS_BIG_ENDIAN
)
5072 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5074 word
= bitpos
/ BITS_PER_WORD
;
5075 temp
= operand_subword_force (temp
, word
, fmode
);
5076 bitpos
= bitpos
% BITS_PER_WORD
;
5079 /* Force the intermediate word_mode (or narrower) result into a
5080 register. This avoids attempting to create paradoxical SUBREGs
5081 of floating point modes below. */
5082 temp
= force_reg (imode
, temp
);
5084 /* If the bitpos is within the "result mode" lowpart, the operation
5085 can be implement with a single bitwise AND. Otherwise, we need
5086 a right shift and an AND. */
5088 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5090 double_int mask
= double_int_setbit (double_int_zero
, bitpos
);
5092 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5093 temp
= gen_lowpart (rmode
, temp
);
5094 temp
= expand_binop (rmode
, and_optab
, temp
,
5095 immed_double_int_const (mask
, rmode
),
5096 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5100 /* Perform a logical right shift to place the signbit in the least
5101 significant bit, then truncate the result to the desired mode
5102 and mask just this bit. */
5103 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5104 temp
= gen_lowpart (rmode
, temp
);
5105 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5106 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5112 /* Expand fork or exec calls. TARGET is the desired target of the
5113 call. EXP is the call. FN is the
5114 identificator of the actual function. IGNORE is nonzero if the
5115 value is to be ignored. */
5118 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5123 /* If we are not profiling, just call the function. */
5124 if (!profile_arc_flag
)
5127 /* Otherwise call the wrapper. This should be equivalent for the rest of
5128 compiler, so the code does not diverge, and the wrapper may run the
5129 code necessary for keeping the profiling sane. */
5131 switch (DECL_FUNCTION_CODE (fn
))
5134 id
= get_identifier ("__gcov_fork");
5137 case BUILT_IN_EXECL
:
5138 id
= get_identifier ("__gcov_execl");
5141 case BUILT_IN_EXECV
:
5142 id
= get_identifier ("__gcov_execv");
5145 case BUILT_IN_EXECLP
:
5146 id
= get_identifier ("__gcov_execlp");
5149 case BUILT_IN_EXECLE
:
5150 id
= get_identifier ("__gcov_execle");
5153 case BUILT_IN_EXECVP
:
5154 id
= get_identifier ("__gcov_execvp");
5157 case BUILT_IN_EXECVE
:
5158 id
= get_identifier ("__gcov_execve");
5165 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5166 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5167 DECL_EXTERNAL (decl
) = 1;
5168 TREE_PUBLIC (decl
) = 1;
5169 DECL_ARTIFICIAL (decl
) = 1;
5170 TREE_NOTHROW (decl
) = 1;
5171 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5172 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5173 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5174 return expand_call (call
, target
, ignore
);
5179 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5180 the pointer in these functions is void*, the tree optimizers may remove
5181 casts. The mode computed in expand_builtin isn't reliable either, due
5182 to __sync_bool_compare_and_swap.
5184 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5185 group of builtins. This gives us log2 of the mode size. */
5187 static inline enum machine_mode
5188 get_builtin_sync_mode (int fcode_diff
)
5190 /* The size is not negotiable, so ask not to get BLKmode in return
5191 if the target indicates that a smaller size would be better. */
5192 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5195 /* Expand the memory expression LOC and return the appropriate memory operand
5196 for the builtin_sync operations. */
5199 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5203 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5204 addr
= convert_memory_address (Pmode
, addr
);
5206 /* Note that we explicitly do not want any alias information for this
5207 memory, so that we kill all other live memories. Otherwise we don't
5208 satisfy the full barrier semantics of the intrinsic. */
5209 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5211 /* The alignment needs to be at least according to that of the mode. */
5212 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5213 get_pointer_alignment (loc
)));
5214 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5215 MEM_VOLATILE_P (mem
) = 1;
5220 /* Make sure an argument is in the right mode.
5221 EXP is the tree argument.
5222 MODE is the mode it should be in. */
5225 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
5228 enum machine_mode old_mode
;
5230 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5231 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5232 of CONST_INTs, where we know the old_mode only from the call argument. */
5234 old_mode
= GET_MODE (val
);
5235 if (old_mode
== VOIDmode
)
5236 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5237 val
= convert_modes (mode
, old_mode
, val
, 1);
5242 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5243 EXP is the CALL_EXPR. CODE is the rtx code
5244 that corresponds to the arithmetic or logical operation from the name;
5245 an exception here is that NOT actually means NAND. TARGET is an optional
5246 place for us to store the results; AFTER is true if this is the
5247 fetch_and_xxx form. */
5250 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5251 enum rtx_code code
, bool after
,
5255 location_t loc
= EXPR_LOCATION (exp
);
5257 if (code
== NOT
&& warn_sync_nand
)
5259 tree fndecl
= get_callee_fndecl (exp
);
5260 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5262 static bool warned_f_a_n
, warned_n_a_f
;
5266 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5267 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5268 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5269 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5270 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5274 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5275 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5276 warned_f_a_n
= true;
5279 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5280 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5281 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5282 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5283 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5287 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5288 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5289 warned_n_a_f
= true;
5297 /* Expand the operands. */
5298 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5299 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5301 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5305 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5306 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5307 true if this is the boolean form. TARGET is a place for us to store the
5308 results; this is NOT optional if IS_BOOL is true. */
5311 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5312 bool is_bool
, rtx target
)
5314 rtx old_val
, new_val
, mem
;
5317 /* Expand the operands. */
5318 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5319 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5320 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5322 pbool
= poval
= NULL
;
5323 if (target
!= const0_rtx
)
5330 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5331 false, MEMMODEL_SEQ_CST
,
5338 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5339 general form is actually an atomic exchange, and some targets only
5340 support a reduced form with the second argument being a constant 1.
5341 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5345 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5350 /* Expand the operands. */
5351 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5352 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5354 return expand_sync_lock_test_and_set (target
, mem
, val
);
5357 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5360 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5364 /* Expand the operands. */
5365 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5367 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5370 /* Given an integer representing an ``enum memmodel'', verify its
5371 correctness and return the memory model enum.  Non-constant or invalid
5372 arguments degrade gracefully to MEMMODEL_SEQ_CST (with a warning for
5373 invalid constants) rather than producing an error.  */
5373 static enum memmodel
5374 get_memmodel (tree exp
)
5377 unsigned HOST_WIDE_INT val
;
5379 /* If the parameter is not a constant, it's a run time value so we'll just
5380 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5381 if (TREE_CODE (exp
) != INTEGER_CST
)
5382 return MEMMODEL_SEQ_CST
;
5384 op
= expand_normal (exp
);
/* NOTE(review): VAL is consumed below but no assignment from OP is visible
   in this extraction -- a line such as "val = INTVAL (op);" appears to have
   been dropped.  Confirm against the full file.  */
/* Targets may validate/translate target-specific bits of the model;
   otherwise any bits outside MEMMODEL_MASK are an unknown architecture
   specifier and we fall back to seq-cst.  */
5387 if (targetm
.memmodel_check
)
5388 val
= targetm
.memmodel_check (val
);
5389 else if (val
& ~MEMMODEL_MASK
)
5391 warning (OPT_Winvalid_memory_model
,
5392 "Unknown architecture specifier in memory model to builtin.");
5393 return MEMMODEL_SEQ_CST
;
/* The base model (low bits) must be one of the known enum values.  */
5396 if ((INTVAL(op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5398 warning (OPT_Winvalid_memory_model
,
5399 "invalid memory model argument to builtin");
5400 return MEMMODEL_SEQ_CST
;
5403 return (enum memmodel
) val
;
5406 /* Expand the __atomic_exchange intrinsic:
5407 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5408 EXP is the CALL_EXPR.
5409 TARGET is an optional place for us to store the results. */
5412 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5415 enum memmodel model
;
5417 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
/* C11 forbids memory_order_consume for an exchange.  */
5418 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5420 error ("invalid memory model for %<__atomic_exchange%>");
/* With -fno-inline-atomics we fall through to a library call (the return
   on this path is not visible in this extraction).  */
5424 if (!flag_inline_atomics
)
5427 /* Expand the operands. */
5428 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5429 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5431 return expand_atomic_exchange (target
, mem
, val
, model
);
5434 /* Expand the __atomic_compare_exchange intrinsic:
5435 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5436 TYPE desired, BOOL weak,
5437 enum memmodel success,
5438 enum memmodel failure)
5439 EXP is the CALL_EXPR.
5440 TARGET is an optional place for us to store the results. */
5443 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5446 rtx expect
, desired
, mem
, oldval
;
5447 enum memmodel success
, failure
;
5451 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5452 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
/* C11: the failure ordering may not include a release component, and may
   not be stronger than the success ordering.  */
5454 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5455 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5457 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5461 if (failure
> success
)
5463 error ("failure memory model cannot be stronger than success "
5464 "memory model for %<__atomic_compare_exchange%>");
5468 if (!flag_inline_atomics
)
5471 /* Expand the operands. */
5472 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
/* EXPECT is an address (TYPE *), not a value -- hence the Pmode
   conversion; the expected value itself is loaded through it below.  */
5474 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5475 expect
= convert_memory_address (Pmode
, expect
);
5476 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
/* NOTE(review): IS_WEAK is used below but its declaration/assignment from
   WEAK is not visible -- presumably set when WEAK is a nonzero constant
   (the condition at 5480).  Confirm against the full file.  */
5478 weak
= CALL_EXPR_ARG (exp
, 3);
5480 if (host_integerp (weak
, 0) && tree_low_cst (weak
, 0) != 0)
/* Load the current expected value from *EXPECT into a register.  */
5483 oldval
= copy_to_reg (gen_rtx_MEM (mode
, expect
));
5485 if (!expand_atomic_compare_and_swap ((target
== const0_rtx
? NULL
: &target
),
5486 &oldval
, mem
, oldval
, desired
,
5487 is_weak
, success
, failure
))
/* On a failed compare, the observed value is stored back into *EXPECT,
   per the __atomic_compare_exchange contract.  */
5490 emit_move_insn (gen_rtx_MEM (mode
, expect
), oldval
);
5494 /* Expand the __atomic_load intrinsic:
5495 TYPE __atomic_load (TYPE *object, enum memmodel)
5496 EXP is the CALL_EXPR.
5497 TARGET is an optional place for us to store the results. */
5500 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5503 enum memmodel model
;
5505 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
/* C11 forbids release and acquire-release orderings on a pure load.  */
5506 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5507 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5509 error ("invalid memory model for %<__atomic_load%>");
5513 if (!flag_inline_atomics
)
5516 /* Expand the operand. */
5517 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5519 return expand_atomic_load (target
, mem
, model
);
5523 /* Expand the __atomic_store intrinsic:
5524 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5525 EXP is the CALL_EXPR.
5526 TARGET is an optional place for us to store the results. */
5529 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5532 enum memmodel model
;
5534 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
/* C11 permits only relaxed, release, or seq-cst ordering on a store.  */
5535 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5536 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5537 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5539 error ("invalid memory model for %<__atomic_store%>");
5543 if (!flag_inline_atomics
)
5546 /* Expand the operands. */
5547 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5548 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
/* FALSE here contrasts with the TRUE passed by the lock-release and
   atomic-clear expanders above/below; its exact meaning is defined at
   expand_atomic_store (not visible in this chunk).  */
5550 return expand_atomic_store (mem
, val
, model
, false);
5553 /* Expand the __atomic_fetch_XXX intrinsic:
5554 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5555 EXP is the CALL_EXPR.
5556 TARGET is an optional place for us to store the results.
5557 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5558 FETCH_AFTER is true if returning the result of the operation.
5559 FETCH_AFTER is false if returning the value before the operation.
5560 IGNORE is true if the result is not used.
5561 EXT_CALL is the correct builtin for an external call if this cannot be
5562 resolved to an instruction sequence. */
5565 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5566 enum rtx_code code
, bool fetch_after
,
5567 bool ignore
, enum built_in_function ext_call
)
5570 enum memmodel model
;
5574 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5576 /* Expand the operands. */
5577 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5578 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5580 /* Only try generating instructions if inlining is turned on. */
5581 if (flag_inline_atomics
)
5583 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5588 /* Return if a different routine isn't needed for the library call. */
5589 if (ext_call
== BUILT_IN_NONE
)
5592 /* Change the call to the specified function. */
/* Temporarily redirect the CALL_EXPR's callee to EXT_CALL, expand it as
   an ordinary library call, then restore the original callee.  */
5593 fndecl
= get_callee_fndecl (exp
);
5594 addr
= CALL_EXPR_FN (exp
);
5597 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5598 TREE_OPERAND (addr
, 0) = builtin_decl_explicit(ext_call
);
5600 /* Expand the call here so we can emit trailing code. */
5601 ret
= expand_call (exp
, target
, ignore
);
5603 /* Replace the original function just in case it matters. */
5604 TREE_OPERAND (addr
, 0) = fndecl
;
5606 /* Then issue the arithmetic correction to return the right result. */
/* The library routine computes a related result; the AND/NOT sequence and
   the re-application of CODE below recover the value the builtin must
   return.  NOTE(review): the surrounding conditionals selecting between
   these corrections are missing from this extraction.  */
5611 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5613 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5616 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
/* Fallback definitions for targets without an atomic_clear insn pattern:
   HAVE_atomic_clear is 0 so the emit below is compiled out, and
   gen_atomic_clear must never actually be reached.  */
5623 #ifndef HAVE_atomic_clear
5624 # define HAVE_atomic_clear 0
5625 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5628 /* Expand an atomic clear operation.
5629 void _atomic_clear (BOOL *obj, enum memmodel)
5630 EXP is the call expression. */
5633 expand_builtin_atomic_clear (tree exp
)
5635 enum machine_mode mode
;
5637 enum memmodel model
;
/* The object is a bool; derive the integer mode matching its size.  */
5639 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5640 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5641 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
/* Clear is a store, so acquire-flavored orderings are invalid.
   NOTE(review): the diagnostic text names __atomic_store although this is
   __atomic_clear -- looks like a copy/paste slip; a doc-only edit cannot
   change the string, flagging for a separate fix.  */
5643 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5644 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5646 error ("invalid memory model for %<__atomic_store%>");
/* Prefer a dedicated atomic_clear pattern when the target provides one.  */
5650 if (HAVE_atomic_clear
)
5652 emit_insn (gen_atomic_clear (mem
, model
));
5656 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5657 Failing that, a store is issued by __atomic_store. The only way this can
5658 fail is if the bool type is larger than a word size. Unlikely, but
5659 handle it anyway for completeness. Assume a single threaded model since
5660 there is no atomic support in this case, and no barriers are required. */
5661 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5663 emit_move_insn (mem
, const0_rtx
);
5667 /* Expand an atomic test_and_set operation.
5668 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5669 EXP is the call expression. */
5672 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5675 enum memmodel model
;
5676 enum machine_mode mode
;
/* As in expand_builtin_atomic_clear, the object is a bool; use the
   matching integer mode.  */
5678 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5679 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5680 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5682 return expand_atomic_test_and_set (target
, mem
, model
);
5686 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5687 this architecture. If ARG1 is NULL, use typical alignment for size ARG0.
5688 Returns boolean_true_node / boolean_false_node (the non-constant-ARG0
5689 early-exit value is not visible in this extraction).  */
5690 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5693 enum machine_mode mode
;
5694 unsigned int mode_align
, type_align
;
/* The size must be a compile-time constant to answer "always".  */
5696 if (TREE_CODE (arg0
) != INTEGER_CST
)
/* Required alignment is that of the integer mode covering SIZE bits.  */
5699 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5700 mode
= mode_for_size (size
, MODE_INT
, 0);
5701 mode_align
= GET_MODE_ALIGNMENT (mode
);
/* A literal null object pointer means "use typical alignment".  */
5703 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5704 type_align
= mode_align
;
5707 tree ttype
= TREE_TYPE (arg1
);
5709 /* This function is usually invoked and folded immediately by the front
5710 end before anything else has a chance to look at it. The pointer
5711 parameter at this point is usually cast to a void *, so check for that
5712 and look past the cast. */
5713 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5714 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5715 arg1
= TREE_OPERAND (arg1
, 0);
5717 ttype
= TREE_TYPE (arg1
);
5718 gcc_assert (POINTER_TYPE_P (ttype
));
5720 /* Get the underlying type of the object. */
5721 ttype
= TREE_TYPE (ttype
);
5722 type_align
= TYPE_ALIGN (ttype
);
5725 /* If the object has smaller alignment, the lock free routines cannot
5726 be used.  */
5727 if (type_align
< mode_align
)
5728 return boolean_false_node
;
5730 /* Check if a compare_and_swap pattern exists for the mode which represents
5731 the required size. The pattern is not allowed to fail, so the existence
5732 of the pattern indicates support is present. */
5733 if (can_compare_and_swap_p (mode
, true))
5734 return boolean_true_node
;
5736 return boolean_false_node
;
5739 /* Return true if the parameters to call EXP represent an object which will
5740 always generate lock free instructions. The first argument represents the
5741 size of the object, and the second parameter is a pointer to the object
5742 itself. If NULL is passed for the object, then the result is based on
5743 typical alignment for an object of the specified size. Otherwise return
5744 false.  (NOTE(review): the rtx return statements are not visible in this
5745 extraction; the fold result is translated to an rtx constant.)  */
5747 expand_builtin_atomic_always_lock_free (tree exp
)
5750 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5751 tree arg1
= CALL_EXPR_ARG (exp
, 1);
/* "Always" requires a compile-time-constant size.  */
5753 if (TREE_CODE (arg0
) != INTEGER_CST
)
5755 error ("non-constant argument 1 to __atomic_always_lock_free");
/* Delegate the actual decision to the fold routine above.  */
5759 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5760 if (size
== boolean_true_node
)
5765 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5766 is lock free on this architecture.  Only a definite "yes" is folded; any
5767 other case produces no compile-time answer (the fall-through return is
5768 not visible in this extraction).  */
5769 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
/* Without inline atomics we cannot promise lock-freedom at compile time.  */
5771 if (!flag_inline_atomics
)
5774 /* If it isn't always lock free, don't generate a result. */
5775 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5776 return boolean_true_node
;
5781 /* Return true if the parameters to call EXP represent an object which will
5782 always generate lock free instructions. The first argument represents the
5783 size of the object, and the second parameter is a pointer to the object
5784 itself. If NULL is passed for the object, then the result is based on
5785 typical alignment for an object of the specified size. Otherwise return
5786 NULL (unlike the _always_ variant, an unknown answer here defers to a
5787 runtime library call; the rtx returns are not visible in this
5788 extraction).  */
5789 expand_builtin_atomic_is_lock_free (tree exp
)
5792 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5793 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5795 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5797 error ("non-integer argument 1 to __atomic_is_lock_free");
5801 if (!flag_inline_atomics
)
5804 /* If the value is known at compile time, return the RTX for it. */
5805 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5806 if (size
== boolean_true_node
)
5812 /* Expand the __atomic_thread_fence intrinsic:
5813 void __atomic_thread_fence (enum memmodel)
5814 EXP is the CALL_EXPR.  Emits an inter-thread fence for the requested
5815 memory model.  */
5817 expand_builtin_atomic_thread_fence (tree exp
)
5819 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5820 expand_mem_thread_fence (model
);
5823 /* Expand the __atomic_signal_fence intrinsic:
5824 void __atomic_signal_fence (enum memmodel)
5825 EXP is the CALL_EXPR.  Orders memory only against a signal handler on
5826 the same thread, mirroring expand_builtin_atomic_thread_fence above.  */
5828 expand_builtin_atomic_signal_fence (tree exp
)
5830 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5831 expand_mem_signal_fence (model
);
5834 /* Expand the __sync_synchronize intrinsic.  The legacy __sync barrier is
5835 always a full (sequentially consistent) fence.  */
5837 expand_builtin_sync_synchronize (void)
5839 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5843 /* Expand an expression EXP that calls a built-in function,
5844 with result going to TARGET if that's convenient
5845 (and in mode MODE if that's convenient).
5846 SUBTARGET may be used as the target for computing one of EXP's operands.
5847 IGNORE is nonzero if the value is to be ignored. */
5850 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5853 tree fndecl
= get_callee_fndecl (exp
);
5854 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5855 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5858 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5859 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5861 /* When not optimizing, generate calls to library functions for a certain
5864 && !called_as_built_in (fndecl
)
5865 && fcode
!= BUILT_IN_ALLOCA
5866 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5867 && fcode
!= BUILT_IN_FREE
)
5868 return expand_call (exp
, target
, ignore
);
5870 /* The built-in function expanders test for target == const0_rtx
5871 to determine whether the function's result will be ignored. */
5873 target
= const0_rtx
;
5875 /* If the result of a pure or const built-in function is ignored, and
5876 none of its arguments are volatile, we can avoid expanding the
5877 built-in call and just evaluate the arguments for side-effects. */
5878 if (target
== const0_rtx
5879 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5880 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5882 bool volatilep
= false;
5884 call_expr_arg_iterator iter
;
5886 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5887 if (TREE_THIS_VOLATILE (arg
))
5895 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5896 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5903 CASE_FLT_FN (BUILT_IN_FABS
):
5904 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5909 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5910 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5915 /* Just do a normal library call if we were unable to fold
5917 CASE_FLT_FN (BUILT_IN_CABS
):
5920 CASE_FLT_FN (BUILT_IN_EXP
):
5921 CASE_FLT_FN (BUILT_IN_EXP10
):
5922 CASE_FLT_FN (BUILT_IN_POW10
):
5923 CASE_FLT_FN (BUILT_IN_EXP2
):
5924 CASE_FLT_FN (BUILT_IN_EXPM1
):
5925 CASE_FLT_FN (BUILT_IN_LOGB
):
5926 CASE_FLT_FN (BUILT_IN_LOG
):
5927 CASE_FLT_FN (BUILT_IN_LOG10
):
5928 CASE_FLT_FN (BUILT_IN_LOG2
):
5929 CASE_FLT_FN (BUILT_IN_LOG1P
):
5930 CASE_FLT_FN (BUILT_IN_TAN
):
5931 CASE_FLT_FN (BUILT_IN_ASIN
):
5932 CASE_FLT_FN (BUILT_IN_ACOS
):
5933 CASE_FLT_FN (BUILT_IN_ATAN
):
5934 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5935 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5936 because of possible accuracy problems. */
5937 if (! flag_unsafe_math_optimizations
)
5939 CASE_FLT_FN (BUILT_IN_SQRT
):
5940 CASE_FLT_FN (BUILT_IN_FLOOR
):
5941 CASE_FLT_FN (BUILT_IN_CEIL
):
5942 CASE_FLT_FN (BUILT_IN_TRUNC
):
5943 CASE_FLT_FN (BUILT_IN_ROUND
):
5944 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5945 CASE_FLT_FN (BUILT_IN_RINT
):
5946 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5951 CASE_FLT_FN (BUILT_IN_FMA
):
5952 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5957 CASE_FLT_FN (BUILT_IN_ILOGB
):
5958 if (! flag_unsafe_math_optimizations
)
5960 CASE_FLT_FN (BUILT_IN_ISINF
):
5961 CASE_FLT_FN (BUILT_IN_FINITE
):
5962 case BUILT_IN_ISFINITE
:
5963 case BUILT_IN_ISNORMAL
:
5964 target
= expand_builtin_interclass_mathfn (exp
, target
);
5969 CASE_FLT_FN (BUILT_IN_ICEIL
):
5970 CASE_FLT_FN (BUILT_IN_LCEIL
):
5971 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5972 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5973 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5974 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5975 target
= expand_builtin_int_roundingfn (exp
, target
);
5980 CASE_FLT_FN (BUILT_IN_IRINT
):
5981 CASE_FLT_FN (BUILT_IN_LRINT
):
5982 CASE_FLT_FN (BUILT_IN_LLRINT
):
5983 CASE_FLT_FN (BUILT_IN_IROUND
):
5984 CASE_FLT_FN (BUILT_IN_LROUND
):
5985 CASE_FLT_FN (BUILT_IN_LLROUND
):
5986 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5991 CASE_FLT_FN (BUILT_IN_POWI
):
5992 target
= expand_builtin_powi (exp
, target
);
5997 CASE_FLT_FN (BUILT_IN_ATAN2
):
5998 CASE_FLT_FN (BUILT_IN_LDEXP
):
5999 CASE_FLT_FN (BUILT_IN_SCALB
):
6000 CASE_FLT_FN (BUILT_IN_SCALBN
):
6001 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6002 if (! flag_unsafe_math_optimizations
)
6005 CASE_FLT_FN (BUILT_IN_FMOD
):
6006 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6007 CASE_FLT_FN (BUILT_IN_DREM
):
6008 CASE_FLT_FN (BUILT_IN_POW
):
6009 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6014 CASE_FLT_FN (BUILT_IN_CEXPI
):
6015 target
= expand_builtin_cexpi (exp
, target
);
6016 gcc_assert (target
);
6019 CASE_FLT_FN (BUILT_IN_SIN
):
6020 CASE_FLT_FN (BUILT_IN_COS
):
6021 if (! flag_unsafe_math_optimizations
)
6023 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6028 CASE_FLT_FN (BUILT_IN_SINCOS
):
6029 if (! flag_unsafe_math_optimizations
)
6031 target
= expand_builtin_sincos (exp
);
6036 case BUILT_IN_APPLY_ARGS
:
6037 return expand_builtin_apply_args ();
6039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6040 FUNCTION with a copy of the parameters described by
6041 ARGUMENTS, and ARGSIZE. It returns a block of memory
6042 allocated on the stack into which is stored all the registers
6043 that might possibly be used for returning the result of a
6044 function. ARGUMENTS is the value returned by
6045 __builtin_apply_args. ARGSIZE is the number of bytes of
6046 arguments that must be copied. ??? How should this value be
6047 computed? We'll also need a safe worst case value for varargs
6049 case BUILT_IN_APPLY
:
6050 if (!validate_arglist (exp
, POINTER_TYPE
,
6051 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6052 && !validate_arglist (exp
, REFERENCE_TYPE
,
6053 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6059 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6060 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6061 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6063 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6066 /* __builtin_return (RESULT) causes the function to return the
6067 value described by RESULT. RESULT is address of the block of
6068 memory returned by __builtin_apply. */
6069 case BUILT_IN_RETURN
:
6070 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6071 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6074 case BUILT_IN_SAVEREGS
:
6075 return expand_builtin_saveregs ();
6077 case BUILT_IN_VA_ARG_PACK
:
6078 /* All valid uses of __builtin_va_arg_pack () are removed during
6080 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6083 case BUILT_IN_VA_ARG_PACK_LEN
:
6084 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6086 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6089 /* Return the address of the first anonymous stack arg. */
6090 case BUILT_IN_NEXT_ARG
:
6091 if (fold_builtin_next_arg (exp
, false))
6093 return expand_builtin_next_arg ();
6095 case BUILT_IN_CLEAR_CACHE
:
6096 target
= expand_builtin___clear_cache (exp
);
6101 case BUILT_IN_CLASSIFY_TYPE
:
6102 return expand_builtin_classify_type (exp
);
6104 case BUILT_IN_CONSTANT_P
:
6107 case BUILT_IN_FRAME_ADDRESS
:
6108 case BUILT_IN_RETURN_ADDRESS
:
6109 return expand_builtin_frame_address (fndecl
, exp
);
6111 /* Returns the address of the area where the structure is returned.
6113 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6114 if (call_expr_nargs (exp
) != 0
6115 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6116 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6119 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6121 case BUILT_IN_ALLOCA
:
6122 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6123 /* If the allocation stems from the declaration of a variable-sized
6124 object, it cannot accumulate. */
6125 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6130 case BUILT_IN_STACK_SAVE
:
6131 return expand_stack_save ();
6133 case BUILT_IN_STACK_RESTORE
:
6134 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6137 case BUILT_IN_BSWAP16
:
6138 case BUILT_IN_BSWAP32
:
6139 case BUILT_IN_BSWAP64
:
6140 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6145 CASE_INT_FN (BUILT_IN_FFS
):
6146 case BUILT_IN_FFSIMAX
:
6147 target
= expand_builtin_unop (target_mode
, exp
, target
,
6148 subtarget
, ffs_optab
);
6153 CASE_INT_FN (BUILT_IN_CLZ
):
6154 case BUILT_IN_CLZIMAX
:
6155 target
= expand_builtin_unop (target_mode
, exp
, target
,
6156 subtarget
, clz_optab
);
6161 CASE_INT_FN (BUILT_IN_CTZ
):
6162 case BUILT_IN_CTZIMAX
:
6163 target
= expand_builtin_unop (target_mode
, exp
, target
,
6164 subtarget
, ctz_optab
);
6169 CASE_INT_FN (BUILT_IN_CLRSB
):
6170 case BUILT_IN_CLRSBIMAX
:
6171 target
= expand_builtin_unop (target_mode
, exp
, target
,
6172 subtarget
, clrsb_optab
);
6177 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6178 case BUILT_IN_POPCOUNTIMAX
:
6179 target
= expand_builtin_unop (target_mode
, exp
, target
,
6180 subtarget
, popcount_optab
);
6185 CASE_INT_FN (BUILT_IN_PARITY
):
6186 case BUILT_IN_PARITYIMAX
:
6187 target
= expand_builtin_unop (target_mode
, exp
, target
,
6188 subtarget
, parity_optab
);
6193 case BUILT_IN_STRLEN
:
6194 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6199 case BUILT_IN_STRCPY
:
6200 target
= expand_builtin_strcpy (exp
, target
);
6205 case BUILT_IN_STRNCPY
:
6206 target
= expand_builtin_strncpy (exp
, target
);
6211 case BUILT_IN_STPCPY
:
6212 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6217 case BUILT_IN_MEMCPY
:
6218 target
= expand_builtin_memcpy (exp
, target
);
6223 case BUILT_IN_MEMPCPY
:
6224 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6229 case BUILT_IN_MEMSET
:
6230 target
= expand_builtin_memset (exp
, target
, mode
);
6235 case BUILT_IN_BZERO
:
6236 target
= expand_builtin_bzero (exp
);
6241 case BUILT_IN_STRCMP
:
6242 target
= expand_builtin_strcmp (exp
, target
);
6247 case BUILT_IN_STRNCMP
:
6248 target
= expand_builtin_strncmp (exp
, target
, mode
);
6254 case BUILT_IN_MEMCMP
:
6255 target
= expand_builtin_memcmp (exp
, target
, mode
);
6260 case BUILT_IN_SETJMP
:
6261 /* This should have been lowered to the builtins below. */
6264 case BUILT_IN_SETJMP_SETUP
:
6265 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6266 and the receiver label. */
6267 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6269 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6270 VOIDmode
, EXPAND_NORMAL
);
6271 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6272 rtx label_r
= label_rtx (label
);
6274 /* This is copied from the handling of non-local gotos. */
6275 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6276 nonlocal_goto_handler_labels
6277 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6278 nonlocal_goto_handler_labels
);
6279 /* ??? Do not let expand_label treat us as such since we would
6280 not want to be both on the list of non-local labels and on
6281 the list of forced labels. */
6282 FORCED_LABEL (label
) = 0;
6287 case BUILT_IN_SETJMP_DISPATCHER
:
6288 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6289 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6291 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6292 rtx label_r
= label_rtx (label
);
6294 /* Remove the dispatcher label from the list of non-local labels
6295 since the receiver labels have been added to it above. */
6296 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6301 case BUILT_IN_SETJMP_RECEIVER
:
6302 /* __builtin_setjmp_receiver is passed the receiver label. */
6303 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6305 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6306 rtx label_r
= label_rtx (label
);
6308 expand_builtin_setjmp_receiver (label_r
);
6313 /* __builtin_longjmp is passed a pointer to an array of five words.
6314 It's similar to the C library longjmp function but works with
6315 __builtin_setjmp above. */
6316 case BUILT_IN_LONGJMP
:
6317 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6319 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6320 VOIDmode
, EXPAND_NORMAL
);
6321 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6323 if (value
!= const1_rtx
)
6325 error ("%<__builtin_longjmp%> second argument must be 1");
6329 expand_builtin_longjmp (buf_addr
, value
);
6334 case BUILT_IN_NONLOCAL_GOTO
:
6335 target
= expand_builtin_nonlocal_goto (exp
);
6340 /* This updates the setjmp buffer that is its argument with the value
6341 of the current stack pointer. */
6342 case BUILT_IN_UPDATE_SETJMP_BUF
:
6343 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6346 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6348 expand_builtin_update_setjmp_buf (buf_addr
);
6354 expand_builtin_trap ();
6357 case BUILT_IN_UNREACHABLE
:
6358 expand_builtin_unreachable ();
6361 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6362 case BUILT_IN_SIGNBITD32
:
6363 case BUILT_IN_SIGNBITD64
:
6364 case BUILT_IN_SIGNBITD128
:
6365 target
= expand_builtin_signbit (exp
, target
);
6370 /* Various hooks for the DWARF 2 __throw routine. */
6371 case BUILT_IN_UNWIND_INIT
:
6372 expand_builtin_unwind_init ();
6374 case BUILT_IN_DWARF_CFA
:
6375 return virtual_cfa_rtx
;
6376 #ifdef DWARF2_UNWIND_INFO
6377 case BUILT_IN_DWARF_SP_COLUMN
:
6378 return expand_builtin_dwarf_sp_column ();
6379 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6380 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6383 case BUILT_IN_FROB_RETURN_ADDR
:
6384 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6385 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6386 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6387 case BUILT_IN_EH_RETURN
:
6388 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6389 CALL_EXPR_ARG (exp
, 1));
6391 #ifdef EH_RETURN_DATA_REGNO
6392 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6393 return expand_builtin_eh_return_data_regno (exp
);
6395 case BUILT_IN_EXTEND_POINTER
:
6396 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6397 case BUILT_IN_EH_POINTER
:
6398 return expand_builtin_eh_pointer (exp
);
6399 case BUILT_IN_EH_FILTER
:
6400 return expand_builtin_eh_filter (exp
);
6401 case BUILT_IN_EH_COPY_VALUES
:
6402 return expand_builtin_eh_copy_values (exp
);
6404 case BUILT_IN_VA_START
:
6405 return expand_builtin_va_start (exp
);
6406 case BUILT_IN_VA_END
:
6407 return expand_builtin_va_end (exp
);
6408 case BUILT_IN_VA_COPY
:
6409 return expand_builtin_va_copy (exp
);
6410 case BUILT_IN_EXPECT
:
6411 return expand_builtin_expect (exp
, target
);
6412 case BUILT_IN_ASSUME_ALIGNED
:
6413 return expand_builtin_assume_aligned (exp
, target
);
6414 case BUILT_IN_PREFETCH
:
6415 expand_builtin_prefetch (exp
);
6418 case BUILT_IN_INIT_TRAMPOLINE
:
6419 return expand_builtin_init_trampoline (exp
, true);
6420 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6421 return expand_builtin_init_trampoline (exp
, false);
6422 case BUILT_IN_ADJUST_TRAMPOLINE
:
6423 return expand_builtin_adjust_trampoline (exp
);
6426 case BUILT_IN_EXECL
:
6427 case BUILT_IN_EXECV
:
6428 case BUILT_IN_EXECLP
:
6429 case BUILT_IN_EXECLE
:
6430 case BUILT_IN_EXECVP
:
6431 case BUILT_IN_EXECVE
:
6432 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6437 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6438 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6439 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6440 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6442 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6443 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6448 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6449 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6450 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6451 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6453 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6454 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6459 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6460 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6461 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6462 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6463 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6464 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6465 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6470 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6471 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6472 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6473 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6474 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6475 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6476 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6481 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6482 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6483 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6484 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6486 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6487 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6492 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6493 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6494 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6495 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6497 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6498 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6503 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6504 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6505 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6506 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6508 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6509 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6514 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6515 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6516 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6517 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6519 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6520 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6525 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6526 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6527 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6528 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6529 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6530 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6531 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6536 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6537 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6538 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6539 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6540 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6541 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6542 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6547 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6548 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6549 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6550 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6552 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6553 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6558 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6559 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6560 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6561 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6563 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6564 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6569 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6570 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6571 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6572 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6574 if (mode
== VOIDmode
)
6575 mode
= TYPE_MODE (boolean_type_node
);
6576 if (!target
|| !register_operand (target
, mode
))
6577 target
= gen_reg_rtx (mode
);
6579 mode
= get_builtin_sync_mode
6580 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6581 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6586 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6587 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6588 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6589 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6591 mode
= get_builtin_sync_mode
6592 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6593 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6598 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6599 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6603 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6604 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6609 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6610 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6611 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6612 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6613 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6614 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6615 expand_builtin_sync_lock_release (mode
, exp
);
6618 case BUILT_IN_SYNC_SYNCHRONIZE
:
6619 expand_builtin_sync_synchronize ();
6622 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6623 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6624 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6625 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6626 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6627 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6628 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6633 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6634 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6639 unsigned int nargs
, z
;
6643 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6644 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6648 /* If this is turned into an external library call, the weak parameter
6649 must be dropped to match the expected parameter list. */
6650 nargs
= call_expr_nargs (exp
);
6651 vec
= VEC_alloc (tree
, gc
, nargs
- 1);
6652 for (z
= 0; z
< 3; z
++)
6653 VEC_quick_push (tree
, vec
, CALL_EXPR_ARG (exp
, z
));
6654 /* Skip the boolean weak parameter. */
6655 for (z
= 4; z
< 6; z
++)
6656 VEC_quick_push (tree
, vec
, CALL_EXPR_ARG (exp
, z
));
6657 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6661 case BUILT_IN_ATOMIC_LOAD_1
:
6662 case BUILT_IN_ATOMIC_LOAD_2
:
6663 case BUILT_IN_ATOMIC_LOAD_4
:
6664 case BUILT_IN_ATOMIC_LOAD_8
:
6665 case BUILT_IN_ATOMIC_LOAD_16
:
6666 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6667 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6672 case BUILT_IN_ATOMIC_STORE_1
:
6673 case BUILT_IN_ATOMIC_STORE_2
:
6674 case BUILT_IN_ATOMIC_STORE_4
:
6675 case BUILT_IN_ATOMIC_STORE_8
:
6676 case BUILT_IN_ATOMIC_STORE_16
:
6677 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6678 target
= expand_builtin_atomic_store (mode
, exp
);
6683 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6684 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6685 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6686 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6687 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6689 enum built_in_function lib
;
6690 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6691 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6692 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6693 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6699 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6700 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6701 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6702 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6703 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6705 enum built_in_function lib
;
6706 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6707 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6708 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6709 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6715 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6716 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6717 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6718 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6719 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6721 enum built_in_function lib
;
6722 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6723 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6724 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6725 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6731 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6732 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6733 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6734 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6735 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6737 enum built_in_function lib
;
6738 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6739 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6740 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6741 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6747 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6748 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6749 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6750 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6751 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6753 enum built_in_function lib
;
6754 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6755 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6756 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6757 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6763 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6764 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6765 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6766 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6767 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6769 enum built_in_function lib
;
6770 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6771 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6772 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6773 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6779 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6780 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6781 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6782 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6783 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6784 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6785 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6786 ignore
, BUILT_IN_NONE
);
6791 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6792 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6793 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6794 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6795 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6796 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6797 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6798 ignore
, BUILT_IN_NONE
);
6803 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6804 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6805 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6806 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6807 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6808 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6809 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6810 ignore
, BUILT_IN_NONE
);
6815 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6816 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6817 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6818 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6819 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6820 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6821 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6822 ignore
, BUILT_IN_NONE
);
6827 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6828 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6829 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6830 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6831 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6832 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6833 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6834 ignore
, BUILT_IN_NONE
);
6839 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6840 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6841 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6842 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6843 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6844 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6845 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6846 ignore
, BUILT_IN_NONE
);
6851 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6852 return expand_builtin_atomic_test_and_set (exp
, target
);
6854 case BUILT_IN_ATOMIC_CLEAR
:
6855 return expand_builtin_atomic_clear (exp
);
6857 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6858 return expand_builtin_atomic_always_lock_free (exp
);
6860 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6861 target
= expand_builtin_atomic_is_lock_free (exp
);
6866 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6867 expand_builtin_atomic_thread_fence (exp
);
6870 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6871 expand_builtin_atomic_signal_fence (exp
);
6874 case BUILT_IN_OBJECT_SIZE
:
6875 return expand_builtin_object_size (exp
);
6877 case BUILT_IN_MEMCPY_CHK
:
6878 case BUILT_IN_MEMPCPY_CHK
:
6879 case BUILT_IN_MEMMOVE_CHK
:
6880 case BUILT_IN_MEMSET_CHK
:
6881 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6886 case BUILT_IN_STRCPY_CHK
:
6887 case BUILT_IN_STPCPY_CHK
:
6888 case BUILT_IN_STRNCPY_CHK
:
6889 case BUILT_IN_STPNCPY_CHK
:
6890 case BUILT_IN_STRCAT_CHK
:
6891 case BUILT_IN_STRNCAT_CHK
:
6892 case BUILT_IN_SNPRINTF_CHK
:
6893 case BUILT_IN_VSNPRINTF_CHK
:
6894 maybe_emit_chk_warning (exp
, fcode
);
6897 case BUILT_IN_SPRINTF_CHK
:
6898 case BUILT_IN_VSPRINTF_CHK
:
6899 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6903 if (warn_free_nonheap_object
)
6904 maybe_emit_free_warning (exp
);
6907 default: /* just do library call, if unknown builtin */
6911 /* The switch statement above can drop through to cause the function
6912 to be called normally. */
6913 return expand_call (exp
, target
, ignore
);
6916 /* Determine whether a tree node represents a call to a built-in
6917 function. If the tree T is a call to a built-in function with
6918 the right number of arguments of the appropriate types, return
6919 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6920 Otherwise the return value is END_BUILTINS. */
6922 enum built_in_function
6923 builtin_mathfn_code (const_tree t
)
6925 const_tree fndecl
, arg
, parmlist
;
6926 const_tree argtype
, parmtype
;
6927 const_call_expr_arg_iterator iter
;
6929 if (TREE_CODE (t
) != CALL_EXPR
6930 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6931 return END_BUILTINS
;
6933 fndecl
= get_callee_fndecl (t
);
6934 if (fndecl
== NULL_TREE
6935 || TREE_CODE (fndecl
) != FUNCTION_DECL
6936 || ! DECL_BUILT_IN (fndecl
)
6937 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6938 return END_BUILTINS
;
6940 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6941 init_const_call_expr_arg_iterator (t
, &iter
);
6942 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6944 /* If a function doesn't take a variable number of arguments,
6945 the last element in the list will have type `void'. */
6946 parmtype
= TREE_VALUE (parmlist
);
6947 if (VOID_TYPE_P (parmtype
))
6949 if (more_const_call_expr_args_p (&iter
))
6950 return END_BUILTINS
;
6951 return DECL_FUNCTION_CODE (fndecl
);
6954 if (! more_const_call_expr_args_p (&iter
))
6955 return END_BUILTINS
;
6957 arg
= next_const_call_expr_arg (&iter
);
6958 argtype
= TREE_TYPE (arg
);
6960 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6962 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6963 return END_BUILTINS
;
6965 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6967 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6968 return END_BUILTINS
;
6970 else if (POINTER_TYPE_P (parmtype
))
6972 if (! POINTER_TYPE_P (argtype
))
6973 return END_BUILTINS
;
6975 else if (INTEGRAL_TYPE_P (parmtype
))
6977 if (! INTEGRAL_TYPE_P (argtype
))
6978 return END_BUILTINS
;
6981 return END_BUILTINS
;
6984 /* Variable-length argument list. */
6985 return DECL_FUNCTION_CODE (fndecl
);
6988 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6989 evaluate to a constant. */
6992 fold_builtin_constant_p (tree arg
)
6994 /* We return 1 for a numeric type that's known to be a constant
6995 value at compile-time or for an aggregate type that's a
6996 literal constant. */
6999 /* If we know this is a constant, emit the constant of one. */
7000 if (CONSTANT_CLASS_P (arg
)
7001 || (TREE_CODE (arg
) == CONSTRUCTOR
7002 && TREE_CONSTANT (arg
)))
7003 return integer_one_node
;
7004 if (TREE_CODE (arg
) == ADDR_EXPR
)
7006 tree op
= TREE_OPERAND (arg
, 0);
7007 if (TREE_CODE (op
) == STRING_CST
7008 || (TREE_CODE (op
) == ARRAY_REF
7009 && integer_zerop (TREE_OPERAND (op
, 1))
7010 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7011 return integer_one_node
;
7014 /* If this expression has side effects, show we don't know it to be a
7015 constant. Likewise if it's a pointer or aggregate type since in
7016 those case we only want literals, since those are only optimized
7017 when generating RTL, not later.
7018 And finally, if we are compiling an initializer, not code, we
7019 need to return a definite result now; there's not going to be any
7020 more optimization done. */
7021 if (TREE_SIDE_EFFECTS (arg
)
7022 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7023 || POINTER_TYPE_P (TREE_TYPE (arg
))
7025 || folding_initializer
)
7026 return integer_zero_node
;
7031 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7032 return it as a truthvalue. */
7035 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
7037 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7039 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7040 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7041 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7042 pred_type
= TREE_VALUE (arg_types
);
7043 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7045 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7046 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7047 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
7049 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7050 build_int_cst (ret_type
, 0));
7053 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7054 NULL_TREE if no simplification is possible. */
7057 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
7059 tree inner
, fndecl
, inner_arg0
;
7060 enum tree_code code
;
7062 /* Distribute the expected value over short-circuiting operators.
7063 See through the cast from truthvalue_type_node to long. */
7065 while (TREE_CODE (inner_arg0
) == NOP_EXPR
7066 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7067 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7068 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7070 /* If this is a builtin_expect within a builtin_expect keep the
7071 inner one. See through a comparison against a constant. It
7072 might have been added to create a thruthvalue. */
7075 if (COMPARISON_CLASS_P (inner
)
7076 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7077 inner
= TREE_OPERAND (inner
, 0);
7079 if (TREE_CODE (inner
) == CALL_EXPR
7080 && (fndecl
= get_callee_fndecl (inner
))
7081 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7082 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7086 code
= TREE_CODE (inner
);
7087 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7089 tree op0
= TREE_OPERAND (inner
, 0);
7090 tree op1
= TREE_OPERAND (inner
, 1);
7092 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
7093 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
7094 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7096 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7099 /* If the argument isn't invariant then there's nothing else we can do. */
7100 if (!TREE_CONSTANT (inner_arg0
))
7103 /* If we expect that a comparison against the argument will fold to
7104 a constant return the constant. In practice, this means a true
7105 constant or the address of a non-weak symbol. */
7108 if (TREE_CODE (inner
) == ADDR_EXPR
)
7112 inner
= TREE_OPERAND (inner
, 0);
7114 while (TREE_CODE (inner
) == COMPONENT_REF
7115 || TREE_CODE (inner
) == ARRAY_REF
);
7116 if ((TREE_CODE (inner
) == VAR_DECL
7117 || TREE_CODE (inner
) == FUNCTION_DECL
)
7118 && DECL_WEAK (inner
))
7122 /* Otherwise, ARG0 already has the proper type for the return value. */
7126 /* Fold a call to __builtin_classify_type with argument ARG. */
7129 fold_builtin_classify_type (tree arg
)
7132 return build_int_cst (integer_type_node
, no_type_class
);
7134 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7137 /* Fold a call to __builtin_strlen with argument ARG. */
7140 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7142 if (!validate_arg (arg
, POINTER_TYPE
))
7146 tree len
= c_strlen (arg
, 0);
7149 return fold_convert_loc (loc
, type
, len
);
7155 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7158 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7160 REAL_VALUE_TYPE real
;
7162 /* __builtin_inff is intended to be usable to define INFINITY on all
7163 targets. If an infinity is not available, INFINITY expands "to a
7164 positive constant of type float that overflows at translation
7165 time", footnote "In this case, using INFINITY will violate the
7166 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7167 Thus we pedwarn to ensure this constraint violation is
7169 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7170 pedwarn (loc
, 0, "target format does not support infinity");
7173 return build_real (type
, real
);
7176 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7179 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7181 REAL_VALUE_TYPE real
;
7184 if (!validate_arg (arg
, POINTER_TYPE
))
7186 str
= c_getstr (arg
);
7190 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7193 return build_real (type
, real
);
7196 /* Return true if the floating point expression T has an integer value.
7197 We also allow +Inf, -Inf and NaN to be considered integer values. */
7200 integer_valued_real_p (tree t
)
7202 switch (TREE_CODE (t
))
7209 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7214 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7221 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7222 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7225 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7226 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7229 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7233 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7234 if (TREE_CODE (type
) == INTEGER_TYPE
)
7236 if (TREE_CODE (type
) == REAL_TYPE
)
7237 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7242 switch (builtin_mathfn_code (t
))
7244 CASE_FLT_FN (BUILT_IN_CEIL
):
7245 CASE_FLT_FN (BUILT_IN_FLOOR
):
7246 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7247 CASE_FLT_FN (BUILT_IN_RINT
):
7248 CASE_FLT_FN (BUILT_IN_ROUND
):
7249 CASE_FLT_FN (BUILT_IN_TRUNC
):
7252 CASE_FLT_FN (BUILT_IN_FMIN
):
7253 CASE_FLT_FN (BUILT_IN_FMAX
):
7254 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7255 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7268 /* FNDECL is assumed to be a builtin where truncation can be propagated
7269 across (for instance floor((double)f) == (double)floorf (f).
7270 Do the transformation for a call with argument ARG. */
7273 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7275 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7277 if (!validate_arg (arg
, REAL_TYPE
))
7280 /* Integer rounding functions are idempotent. */
7281 if (fcode
== builtin_mathfn_code (arg
))
7284 /* If argument is already integer valued, and we don't need to worry
7285 about setting errno, there's no need to perform rounding. */
7286 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7291 tree arg0
= strip_float_extensions (arg
);
7292 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7293 tree newtype
= TREE_TYPE (arg0
);
7296 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7297 && (decl
= mathfn_built_in (newtype
, fcode
)))
7298 return fold_convert_loc (loc
, ftype
,
7299 build_call_expr_loc (loc
, decl
, 1,
7300 fold_convert_loc (loc
,
7307 /* FNDECL is assumed to be builtin which can narrow the FP type of
7308 the argument, for instance lround((double)f) -> lroundf (f).
7309 Do the transformation for a call with argument ARG. */
7312 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7314 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7316 if (!validate_arg (arg
, REAL_TYPE
))
7319 /* If argument is already integer valued, and we don't need to worry
7320 about setting errno, there's no need to perform rounding. */
7321 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7322 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7323 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7327 tree ftype
= TREE_TYPE (arg
);
7328 tree arg0
= strip_float_extensions (arg
);
7329 tree newtype
= TREE_TYPE (arg0
);
7332 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7333 && (decl
= mathfn_built_in (newtype
, fcode
)))
7334 return build_call_expr_loc (loc
, decl
, 1,
7335 fold_convert_loc (loc
, newtype
, arg0
));
7338 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7339 sizeof (int) == sizeof (long). */
7340 if (TYPE_PRECISION (integer_type_node
)
7341 == TYPE_PRECISION (long_integer_type_node
))
7343 tree newfn
= NULL_TREE
;
7346 CASE_FLT_FN (BUILT_IN_ICEIL
):
7347 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7350 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7351 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7354 CASE_FLT_FN (BUILT_IN_IROUND
):
7355 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7358 CASE_FLT_FN (BUILT_IN_IRINT
):
7359 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7368 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7369 return fold_convert_loc (loc
,
7370 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7374 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7375 sizeof (long long) == sizeof (long). */
7376 if (TYPE_PRECISION (long_long_integer_type_node
)
7377 == TYPE_PRECISION (long_integer_type_node
))
7379 tree newfn
= NULL_TREE
;
7382 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7383 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7386 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7387 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7390 CASE_FLT_FN (BUILT_IN_LLROUND
):
7391 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7394 CASE_FLT_FN (BUILT_IN_LLRINT
):
7395 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7404 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7405 return fold_convert_loc (loc
,
7406 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7413 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7414 return type. Return NULL_TREE if no simplification can be made. */
7417 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7421 if (!validate_arg (arg
, COMPLEX_TYPE
)
7422 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7425 /* Calculate the result when the argument is a constant. */
7426 if (TREE_CODE (arg
) == COMPLEX_CST
7427 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7431 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7433 tree real
= TREE_OPERAND (arg
, 0);
7434 tree imag
= TREE_OPERAND (arg
, 1);
7436 /* If either part is zero, cabs is fabs of the other. */
7437 if (real_zerop (real
))
7438 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7439 if (real_zerop (imag
))
7440 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7442 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7443 if (flag_unsafe_math_optimizations
7444 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7446 const REAL_VALUE_TYPE sqrt2_trunc
7447 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7449 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7450 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7451 build_real (type
, sqrt2_trunc
));
7455 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7456 if (TREE_CODE (arg
) == NEGATE_EXPR
7457 || TREE_CODE (arg
) == CONJ_EXPR
)
7458 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7460 /* Don't do this when optimizing for size. */
7461 if (flag_unsafe_math_optimizations
7462 && optimize
&& optimize_function_for_speed_p (cfun
))
7464 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7466 if (sqrtfn
!= NULL_TREE
)
7468 tree rpart
, ipart
, result
;
7470 arg
= builtin_save_expr (arg
);
7472 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7473 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7475 rpart
= builtin_save_expr (rpart
);
7476 ipart
= builtin_save_expr (ipart
);
7478 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7479 fold_build2_loc (loc
, MULT_EXPR
, type
,
7481 fold_build2_loc (loc
, MULT_EXPR
, type
,
7484 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7491 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7492 complex tree type of the result. If NEG is true, the imaginary
7493 zero is negative. */
7496 build_complex_cproj (tree type
, bool neg
)
7498 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7502 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7503 build_real (TREE_TYPE (type
), rzero
));
7506 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7507 return type. Return NULL_TREE if no simplification can be made. */
7510 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7512 if (!validate_arg (arg
, COMPLEX_TYPE
)
7513 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7516 /* If there are no infinities, return arg. */
7517 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7518 return non_lvalue_loc (loc
, arg
);
7520 /* Calculate the result when the argument is a constant. */
7521 if (TREE_CODE (arg
) == COMPLEX_CST
)
7523 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7524 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7526 if (real_isinf (real
) || real_isinf (imag
))
7527 return build_complex_cproj (type
, imag
->sign
);
7531 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7533 tree real
= TREE_OPERAND (arg
, 0);
7534 tree imag
= TREE_OPERAND (arg
, 1);
7539 /* If the real part is inf and the imag part is known to be
7540 nonnegative, return (inf + 0i). Remember side-effects are
7541 possible in the imag part. */
7542 if (TREE_CODE (real
) == REAL_CST
7543 && real_isinf (TREE_REAL_CST_PTR (real
))
7544 && tree_expr_nonnegative_p (imag
))
7545 return omit_one_operand_loc (loc
, type
,
7546 build_complex_cproj (type
, false),
7549 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7550 Remember side-effects are possible in the real part. */
7551 if (TREE_CODE (imag
) == REAL_CST
7552 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7554 omit_one_operand_loc (loc
, type
,
7555 build_complex_cproj (type
, TREE_REAL_CST_PTR
7556 (imag
)->sign
), arg
);
7562 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7563 Return NULL_TREE if no simplification can be made. */
7566 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7569 enum built_in_function fcode
;
7572 if (!validate_arg (arg
, REAL_TYPE
))
7575 /* Calculate the result when the argument is a constant. */
7576 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7579 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7580 fcode
= builtin_mathfn_code (arg
);
7581 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7583 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7584 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7585 CALL_EXPR_ARG (arg
, 0),
7586 build_real (type
, dconsthalf
));
7587 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7590 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7591 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7593 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7597 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7599 /* The inner root was either sqrt or cbrt. */
7600 /* This was a conditional expression but it triggered a bug
7602 REAL_VALUE_TYPE dconstroot
;
7603 if (BUILTIN_SQRT_P (fcode
))
7604 dconstroot
= dconsthalf
;
7606 dconstroot
= dconst_third ();
7608 /* Adjust for the outer root. */
7609 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7610 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7611 tree_root
= build_real (type
, dconstroot
);
7612 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7616 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7617 if (flag_unsafe_math_optimizations
7618 && (fcode
== BUILT_IN_POW
7619 || fcode
== BUILT_IN_POWF
7620 || fcode
== BUILT_IN_POWL
))
7622 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7623 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7624 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7626 if (!tree_expr_nonnegative_p (arg0
))
7627 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7628 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7629 build_real (type
, dconsthalf
));
7630 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7636 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7637 Return NULL_TREE if no simplification can be made. */
7640 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7642 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7645 if (!validate_arg (arg
, REAL_TYPE
))
7648 /* Calculate the result when the argument is a constant. */
7649 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7652 if (flag_unsafe_math_optimizations
)
7654 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7655 if (BUILTIN_EXPONENT_P (fcode
))
7657 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7658 const REAL_VALUE_TYPE third_trunc
=
7659 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7660 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7661 CALL_EXPR_ARG (arg
, 0),
7662 build_real (type
, third_trunc
));
7663 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7666 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7667 if (BUILTIN_SQRT_P (fcode
))
7669 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7673 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7675 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7677 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7678 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7679 tree_root
= build_real (type
, dconstroot
);
7680 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7684 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7685 if (BUILTIN_CBRT_P (fcode
))
7687 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7688 if (tree_expr_nonnegative_p (arg0
))
7690 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7695 REAL_VALUE_TYPE dconstroot
;
7697 real_arithmetic (&dconstroot
, MULT_EXPR
,
7698 dconst_third_ptr (), dconst_third_ptr ());
7699 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7700 tree_root
= build_real (type
, dconstroot
);
7701 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7706 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7707 if (fcode
== BUILT_IN_POW
7708 || fcode
== BUILT_IN_POWF
7709 || fcode
== BUILT_IN_POWL
)
7711 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7712 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7713 if (tree_expr_nonnegative_p (arg00
))
7715 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7716 const REAL_VALUE_TYPE dconstroot
7717 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7718 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7719 build_real (type
, dconstroot
));
7720 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7727 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7728 TYPE is the type of the return value. Return NULL_TREE if no
7729 simplification can be made. */
7732 fold_builtin_cos (location_t loc
,
7733 tree arg
, tree type
, tree fndecl
)
7737 if (!validate_arg (arg
, REAL_TYPE
))
7740 /* Calculate the result when the argument is a constant. */
7741 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7744 /* Optimize cos(-x) into cos (x). */
7745 if ((narg
= fold_strip_sign_ops (arg
)))
7746 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7751 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7752 Return NULL_TREE if no simplification can be made. */
7755 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7757 if (validate_arg (arg
, REAL_TYPE
))
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7765 /* Optimize cosh(-x) into cosh (x). */
7766 if ((narg
= fold_strip_sign_ops (arg
)))
7767 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7773 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7774 argument ARG. TYPE is the type of the return value. Return
7775 NULL_TREE if no simplification can be made. */
7778 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7781 if (validate_arg (arg
, COMPLEX_TYPE
)
7782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7786 /* Calculate the result when the argument is a constant. */
7787 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7790 /* Optimize fn(-x) into fn(x). */
7791 if ((tmp
= fold_strip_sign_ops (arg
)))
7792 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7798 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7799 Return NULL_TREE if no simplification can be made. */
7802 fold_builtin_tan (tree arg
, tree type
)
7804 enum built_in_function fcode
;
7807 if (!validate_arg (arg
, REAL_TYPE
))
7810 /* Calculate the result when the argument is a constant. */
7811 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7814 /* Optimize tan(atan(x)) = x. */
7815 fcode
= builtin_mathfn_code (arg
);
7816 if (flag_unsafe_math_optimizations
7817 && (fcode
== BUILT_IN_ATAN
7818 || fcode
== BUILT_IN_ATANF
7819 || fcode
== BUILT_IN_ATANL
))
7820 return CALL_EXPR_ARG (arg
, 0);
7825 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7826 NULL_TREE if no simplification can be made. */
7829 fold_builtin_sincos (location_t loc
,
7830 tree arg0
, tree arg1
, tree arg2
)
7835 if (!validate_arg (arg0
, REAL_TYPE
)
7836 || !validate_arg (arg1
, POINTER_TYPE
)
7837 || !validate_arg (arg2
, POINTER_TYPE
))
7840 type
= TREE_TYPE (arg0
);
7842 /* Calculate the result when the argument is a constant. */
7843 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7846 /* Canonicalize sincos to cexpi. */
7847 if (!TARGET_C99_FUNCTIONS
)
7849 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7853 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7854 call
= builtin_save_expr (call
);
7856 return build2 (COMPOUND_EXPR
, void_type_node
,
7857 build2 (MODIFY_EXPR
, void_type_node
,
7858 build_fold_indirect_ref_loc (loc
, arg1
),
7859 build1 (IMAGPART_EXPR
, type
, call
)),
7860 build2 (MODIFY_EXPR
, void_type_node
,
7861 build_fold_indirect_ref_loc (loc
, arg2
),
7862 build1 (REALPART_EXPR
, type
, call
)));
7865 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7866 NULL_TREE if no simplification can be made. */
7869 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7872 tree realp
, imagp
, ifn
;
7875 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7876 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7879 /* Calculate the result when the argument is a constant. */
7880 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7883 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7885 /* In case we can figure out the real part of arg0 and it is constant zero
7887 if (!TARGET_C99_FUNCTIONS
)
7889 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7893 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7894 && real_zerop (realp
))
7896 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7897 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7900 /* In case we can easily decompose real and imaginary parts split cexp
7901 to exp (r) * cexpi (i). */
7902 if (flag_unsafe_math_optimizations
7905 tree rfn
, rcall
, icall
;
7907 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7911 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7915 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7916 icall
= builtin_save_expr (icall
);
7917 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7918 rcall
= builtin_save_expr (rcall
);
7919 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7920 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7922 fold_build1_loc (loc
, REALPART_EXPR
,
7924 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7926 fold_build1_loc (loc
, IMAGPART_EXPR
,
7933 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7934 Return NULL_TREE if no simplification can be made. */
7937 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7939 if (!validate_arg (arg
, REAL_TYPE
))
7942 /* Optimize trunc of constant value. */
7943 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7945 REAL_VALUE_TYPE r
, x
;
7946 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7948 x
= TREE_REAL_CST (arg
);
7949 real_trunc (&r
, TYPE_MODE (type
), &x
);
7950 return build_real (type
, r
);
7953 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7956 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7957 Return NULL_TREE if no simplification can be made. */
7960 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7962 if (!validate_arg (arg
, REAL_TYPE
))
7965 /* Optimize floor of constant value. */
7966 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7970 x
= TREE_REAL_CST (arg
);
7971 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7973 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7976 real_floor (&r
, TYPE_MODE (type
), &x
);
7977 return build_real (type
, r
);
7981 /* Fold floor (x) where x is nonnegative to trunc (x). */
7982 if (tree_expr_nonnegative_p (arg
))
7984 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7986 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7989 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7992 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7993 Return NULL_TREE if no simplification can be made. */
7996 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7998 if (!validate_arg (arg
, REAL_TYPE
))
8001 /* Optimize ceil of constant value. */
8002 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8006 x
= TREE_REAL_CST (arg
);
8007 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8009 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8012 real_ceil (&r
, TYPE_MODE (type
), &x
);
8013 return build_real (type
, r
);
8017 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
8020 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8021 Return NULL_TREE if no simplification can be made. */
8024 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
8026 if (!validate_arg (arg
, REAL_TYPE
))
8029 /* Optimize round of constant value. */
8030 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8034 x
= TREE_REAL_CST (arg
);
8035 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
8037 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8040 real_round (&r
, TYPE_MODE (type
), &x
);
8041 return build_real (type
, r
);
8045 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
8048 /* Fold function call to builtin lround, lroundf or lroundl (or the
8049 corresponding long long versions) and other rounding functions. ARG
8050 is the argument to the call. Return NULL_TREE if no simplification
8054 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
8056 if (!validate_arg (arg
, REAL_TYPE
))
8059 /* Optimize lround of constant value. */
8060 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8062 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8064 if (real_isfinite (&x
))
8066 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8067 tree ftype
= TREE_TYPE (arg
);
8071 switch (DECL_FUNCTION_CODE (fndecl
))
8073 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8074 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8075 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8076 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8079 CASE_FLT_FN (BUILT_IN_ICEIL
):
8080 CASE_FLT_FN (BUILT_IN_LCEIL
):
8081 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8082 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8085 CASE_FLT_FN (BUILT_IN_IROUND
):
8086 CASE_FLT_FN (BUILT_IN_LROUND
):
8087 CASE_FLT_FN (BUILT_IN_LLROUND
):
8088 real_round (&r
, TYPE_MODE (ftype
), &x
);
8095 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
8096 if (double_int_fits_to_tree_p (itype
, val
))
8097 return double_int_to_tree (itype
, val
);
8101 switch (DECL_FUNCTION_CODE (fndecl
))
8103 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8104 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8105 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8106 if (tree_expr_nonnegative_p (arg
))
8107 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8108 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8113 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8116 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8117 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8118 the argument to the call. Return NULL_TREE if no simplification can
8122 fold_builtin_bitop (tree fndecl
, tree arg
)
8124 if (!validate_arg (arg
, INTEGER_TYPE
))
8127 /* Optimize for constant argument. */
8128 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8130 HOST_WIDE_INT hi
, width
, result
;
8131 unsigned HOST_WIDE_INT lo
;
8134 type
= TREE_TYPE (arg
);
8135 width
= TYPE_PRECISION (type
);
8136 lo
= TREE_INT_CST_LOW (arg
);
8138 /* Clear all the bits that are beyond the type's precision. */
8139 if (width
> HOST_BITS_PER_WIDE_INT
)
8141 hi
= TREE_INT_CST_HIGH (arg
);
8142 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
8143 hi
&= ~((unsigned HOST_WIDE_INT
) (-1)
8144 << (width
- HOST_BITS_PER_WIDE_INT
));
8149 if (width
< HOST_BITS_PER_WIDE_INT
)
8150 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
8153 switch (DECL_FUNCTION_CODE (fndecl
))
8155 CASE_INT_FN (BUILT_IN_FFS
):
8157 result
= ffs_hwi (lo
);
8159 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
8164 CASE_INT_FN (BUILT_IN_CLZ
):
8166 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8168 result
= width
- floor_log2 (lo
) - 1;
8169 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8173 CASE_INT_FN (BUILT_IN_CTZ
):
8175 result
= ctz_hwi (lo
);
8177 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
8178 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8182 CASE_INT_FN (BUILT_IN_CLRSB
):
8183 if (width
> HOST_BITS_PER_WIDE_INT
8184 && (hi
& ((unsigned HOST_WIDE_INT
) 1
8185 << (width
- HOST_BITS_PER_WIDE_INT
- 1))) != 0)
8187 hi
= ~hi
& ~((unsigned HOST_WIDE_INT
) (-1)
8188 << (width
- HOST_BITS_PER_WIDE_INT
- 1));
8191 else if (width
<= HOST_BITS_PER_WIDE_INT
8192 && (lo
& ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
8193 lo
= ~lo
& ~((unsigned HOST_WIDE_INT
) (-1) << (width
- 1));
8195 result
= width
- floor_log2 (hi
) - 2 - HOST_BITS_PER_WIDE_INT
;
8197 result
= width
- floor_log2 (lo
) - 2;
8202 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8205 result
++, lo
&= lo
- 1;
8207 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8210 CASE_INT_FN (BUILT_IN_PARITY
):
8213 result
++, lo
&= lo
- 1;
8215 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8223 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8229 /* Fold function call to builtin_bswap and the short, long and long long
8230 variants. Return NULL_TREE if no simplification can be made. */
8232 fold_builtin_bswap (tree fndecl
, tree arg
)
8234 if (! validate_arg (arg
, INTEGER_TYPE
))
8237 /* Optimize constant value. */
8238 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8240 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8241 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8242 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8244 width
= TYPE_PRECISION (type
);
8245 lo
= TREE_INT_CST_LOW (arg
);
8246 hi
= TREE_INT_CST_HIGH (arg
);
8248 switch (DECL_FUNCTION_CODE (fndecl
))
8250 case BUILT_IN_BSWAP16
:
8251 case BUILT_IN_BSWAP32
:
8252 case BUILT_IN_BSWAP64
:
8256 for (s
= 0; s
< width
; s
+= 8)
8258 int d
= width
- s
- 8;
8259 unsigned HOST_WIDE_INT byte
;
8261 if (s
< HOST_BITS_PER_WIDE_INT
)
8262 byte
= (lo
>> s
) & 0xff;
8264 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8266 if (d
< HOST_BITS_PER_WIDE_INT
)
8269 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8279 if (width
< HOST_BITS_PER_WIDE_INT
)
8280 return build_int_cst (type
, r_lo
);
8282 return build_int_cst_wide (type
, r_lo
, r_hi
);
8288 /* A subroutine of fold_builtin to fold the various logarithmic
8289 functions. Return NULL_TREE if no simplification can me made.
8290 FUNC is the corresponding MPFR logarithm function. */
8293 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8294 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8296 if (validate_arg (arg
, REAL_TYPE
))
8298 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8300 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8302 /* Calculate the result when the argument is a constant. */
8303 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8306 /* Special case, optimize logN(expN(x)) = x. */
8307 if (flag_unsafe_math_optimizations
8308 && ((func
== mpfr_log
8309 && (fcode
== BUILT_IN_EXP
8310 || fcode
== BUILT_IN_EXPF
8311 || fcode
== BUILT_IN_EXPL
))
8312 || (func
== mpfr_log2
8313 && (fcode
== BUILT_IN_EXP2
8314 || fcode
== BUILT_IN_EXP2F
8315 || fcode
== BUILT_IN_EXP2L
))
8316 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8317 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8319 /* Optimize logN(func()) for various exponential functions. We
8320 want to determine the value "x" and the power "exponent" in
8321 order to transform logN(x**exponent) into exponent*logN(x). */
8322 if (flag_unsafe_math_optimizations
)
8324 tree exponent
= 0, x
= 0;
8328 CASE_FLT_FN (BUILT_IN_EXP
):
8329 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8330 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8332 exponent
= CALL_EXPR_ARG (arg
, 0);
8334 CASE_FLT_FN (BUILT_IN_EXP2
):
8335 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8336 x
= build_real (type
, dconst2
);
8337 exponent
= CALL_EXPR_ARG (arg
, 0);
8339 CASE_FLT_FN (BUILT_IN_EXP10
):
8340 CASE_FLT_FN (BUILT_IN_POW10
):
8341 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8343 REAL_VALUE_TYPE dconst10
;
8344 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8345 x
= build_real (type
, dconst10
);
8347 exponent
= CALL_EXPR_ARG (arg
, 0);
8349 CASE_FLT_FN (BUILT_IN_SQRT
):
8350 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8351 x
= CALL_EXPR_ARG (arg
, 0);
8352 exponent
= build_real (type
, dconsthalf
);
8354 CASE_FLT_FN (BUILT_IN_CBRT
):
8355 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8356 x
= CALL_EXPR_ARG (arg
, 0);
8357 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8360 CASE_FLT_FN (BUILT_IN_POW
):
8361 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8362 x
= CALL_EXPR_ARG (arg
, 0);
8363 exponent
= CALL_EXPR_ARG (arg
, 1);
8369 /* Now perform the optimization. */
8372 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8373 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8381 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8382 NULL_TREE if no simplification can be made. */
8385 fold_builtin_hypot (location_t loc
, tree fndecl
,
8386 tree arg0
, tree arg1
, tree type
)
8388 tree res
, narg0
, narg1
;
8390 if (!validate_arg (arg0
, REAL_TYPE
)
8391 || !validate_arg (arg1
, REAL_TYPE
))
8394 /* Calculate the result when the argument is a constant. */
8395 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8398 /* If either argument to hypot has a negate or abs, strip that off.
8399 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8400 narg0
= fold_strip_sign_ops (arg0
);
8401 narg1
= fold_strip_sign_ops (arg1
);
8404 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8405 narg1
? narg1
: arg1
);
8408 /* If either argument is zero, hypot is fabs of the other. */
8409 if (real_zerop (arg0
))
8410 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8411 else if (real_zerop (arg1
))
8412 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8414 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8415 if (flag_unsafe_math_optimizations
8416 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8418 const REAL_VALUE_TYPE sqrt2_trunc
8419 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8420 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8421 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8422 build_real (type
, sqrt2_trunc
));
8429 /* Fold a builtin function call to pow, powf, or powl. Return
8430 NULL_TREE if no simplification can be made. */
8432 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8436 if (!validate_arg (arg0
, REAL_TYPE
)
8437 || !validate_arg (arg1
, REAL_TYPE
))
8440 /* Calculate the result when the argument is a constant. */
8441 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8444 /* Optimize pow(1.0,y) = 1.0. */
8445 if (real_onep (arg0
))
8446 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8448 if (TREE_CODE (arg1
) == REAL_CST
8449 && !TREE_OVERFLOW (arg1
))
8451 REAL_VALUE_TYPE cint
;
8455 c
= TREE_REAL_CST (arg1
);
8457 /* Optimize pow(x,0.0) = 1.0. */
8458 if (REAL_VALUES_EQUAL (c
, dconst0
))
8459 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8462 /* Optimize pow(x,1.0) = x. */
8463 if (REAL_VALUES_EQUAL (c
, dconst1
))
8466 /* Optimize pow(x,-1.0) = 1.0/x. */
8467 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8468 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8469 build_real (type
, dconst1
), arg0
);
8471 /* Optimize pow(x,0.5) = sqrt(x). */
8472 if (flag_unsafe_math_optimizations
8473 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8475 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8477 if (sqrtfn
!= NULL_TREE
)
8478 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8481 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8482 if (flag_unsafe_math_optimizations
)
8484 const REAL_VALUE_TYPE dconstroot
8485 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8487 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8489 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8490 if (cbrtfn
!= NULL_TREE
)
8491 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8495 /* Check for an integer exponent. */
8496 n
= real_to_integer (&c
);
8497 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8498 if (real_identical (&c
, &cint
))
8500 /* Attempt to evaluate pow at compile-time, unless this should
8501 raise an exception. */
8502 if (TREE_CODE (arg0
) == REAL_CST
8503 && !TREE_OVERFLOW (arg0
)
8505 || (!flag_trapping_math
&& !flag_errno_math
)
8506 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8511 x
= TREE_REAL_CST (arg0
);
8512 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8513 if (flag_unsafe_math_optimizations
|| !inexact
)
8514 return build_real (type
, x
);
8517 /* Strip sign ops from even integer powers. */
8518 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8520 tree narg0
= fold_strip_sign_ops (arg0
);
8522 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8527 if (flag_unsafe_math_optimizations
)
8529 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8531 /* Optimize pow(expN(x),y) = expN(x*y). */
8532 if (BUILTIN_EXPONENT_P (fcode
))
8534 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8535 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8536 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8537 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8540 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8541 if (BUILTIN_SQRT_P (fcode
))
8543 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8544 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8545 build_real (type
, dconsthalf
));
8546 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8549 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8550 if (BUILTIN_CBRT_P (fcode
))
8552 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8553 if (tree_expr_nonnegative_p (arg
))
8555 const REAL_VALUE_TYPE dconstroot
8556 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8557 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8558 build_real (type
, dconstroot
));
8559 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8563 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8564 if (fcode
== BUILT_IN_POW
8565 || fcode
== BUILT_IN_POWF
8566 || fcode
== BUILT_IN_POWL
)
8568 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8569 if (tree_expr_nonnegative_p (arg00
))
8571 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8572 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8573 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8581 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8582 Return NULL_TREE if no simplification can be made. */
8584 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8585 tree arg0
, tree arg1
, tree type
)
8587 if (!validate_arg (arg0
, REAL_TYPE
)
8588 || !validate_arg (arg1
, INTEGER_TYPE
))
8591 /* Optimize pow(1.0,y) = 1.0. */
8592 if (real_onep (arg0
))
8593 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8595 if (host_integerp (arg1
, 0))
8597 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8599 /* Evaluate powi at compile-time. */
8600 if (TREE_CODE (arg0
) == REAL_CST
8601 && !TREE_OVERFLOW (arg0
))
8604 x
= TREE_REAL_CST (arg0
);
8605 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8606 return build_real (type
, x
);
8609 /* Optimize pow(x,0) = 1.0. */
8611 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8614 /* Optimize pow(x,1) = x. */
8618 /* Optimize pow(x,-1) = 1.0/x. */
8620 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8621 build_real (type
, dconst1
), arg0
);
8627 /* A subroutine of fold_builtin to fold the various exponent
8628 functions. Return NULL_TREE if no simplification can be made.
8629 FUNC is the corresponding MPFR exponent function. */
8632 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8633 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8635 if (validate_arg (arg
, REAL_TYPE
))
8637 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8640 /* Calculate the result when the argument is a constant. */
8641 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8644 /* Optimize expN(logN(x)) = x. */
8645 if (flag_unsafe_math_optimizations
)
8647 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8649 if ((func
== mpfr_exp
8650 && (fcode
== BUILT_IN_LOG
8651 || fcode
== BUILT_IN_LOGF
8652 || fcode
== BUILT_IN_LOGL
))
8653 || (func
== mpfr_exp2
8654 && (fcode
== BUILT_IN_LOG2
8655 || fcode
== BUILT_IN_LOG2F
8656 || fcode
== BUILT_IN_LOG2L
))
8657 || (func
== mpfr_exp10
8658 && (fcode
== BUILT_IN_LOG10
8659 || fcode
== BUILT_IN_LOG10F
8660 || fcode
== BUILT_IN_LOG10L
)))
8661 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8668 /* Return true if VAR is a VAR_DECL or a component thereof. */
8671 var_decl_component_p (tree var
)
8674 while (handled_component_p (inner
))
8675 inner
= TREE_OPERAND (inner
, 0);
8676 return SSA_VAR_P (inner
);
8679 /* Fold function call to builtin memset. Return
8680 NULL_TREE if no simplification can be made. */
8683 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8684 tree type
, bool ignore
)
8686 tree var
, ret
, etype
;
8687 unsigned HOST_WIDE_INT length
, cval
;
8689 if (! validate_arg (dest
, POINTER_TYPE
)
8690 || ! validate_arg (c
, INTEGER_TYPE
)
8691 || ! validate_arg (len
, INTEGER_TYPE
))
8694 if (! host_integerp (len
, 1))
8697 /* If the LEN parameter is zero, return DEST. */
8698 if (integer_zerop (len
))
8699 return omit_one_operand_loc (loc
, type
, dest
, c
);
8701 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8706 if (TREE_CODE (var
) != ADDR_EXPR
)
8709 var
= TREE_OPERAND (var
, 0);
8710 if (TREE_THIS_VOLATILE (var
))
8713 etype
= TREE_TYPE (var
);
8714 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8715 etype
= TREE_TYPE (etype
);
8717 if (!INTEGRAL_TYPE_P (etype
)
8718 && !POINTER_TYPE_P (etype
))
8721 if (! var_decl_component_p (var
))
8724 length
= tree_low_cst (len
, 1);
8725 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8726 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8729 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8732 if (integer_zerop (c
))
8736 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8739 cval
= TREE_INT_CST_LOW (c
);
8743 cval
|= (cval
<< 31) << 1;
8746 ret
= build_int_cst_type (etype
, cval
);
8747 var
= build_fold_indirect_ref_loc (loc
,
8748 fold_convert_loc (loc
,
8749 build_pointer_type (etype
),
8751 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8755 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8758 /* Fold function call to builtin memset. Return
8759 NULL_TREE if no simplification can be made. */
8762 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8764 if (! validate_arg (dest
, POINTER_TYPE
)
8765 || ! validate_arg (size
, INTEGER_TYPE
))
8771 /* New argument list transforming bzero(ptr x, int y) to
8772 memset(ptr x, int 0, size_t y). This is done this way
8773 so that if it isn't expanded inline, we fallback to
8774 calling bzero instead of memset. */
8776 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8777 fold_convert_loc (loc
, size_type_node
, size
),
8778 void_type_node
, ignore
);
8781 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8782 NULL_TREE if no simplification can be made.
8783 If ENDP is 0, return DEST (like memcpy).
8784 If ENDP is 1, return DEST+LEN (like mempcpy).
8785 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8786 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8790 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8791 tree len
, tree type
, bool ignore
, int endp
)
8793 tree destvar
, srcvar
, expr
;
8795 if (! validate_arg (dest
, POINTER_TYPE
)
8796 || ! validate_arg (src
, POINTER_TYPE
)
8797 || ! validate_arg (len
, INTEGER_TYPE
))
8800 /* If the LEN parameter is zero, return DEST. */
8801 if (integer_zerop (len
))
8802 return omit_one_operand_loc (loc
, type
, dest
, src
);
8804 /* If SRC and DEST are the same (and not volatile), return
8805 DEST{,+LEN,+LEN-1}. */
8806 if (operand_equal_p (src
, dest
, 0))
8810 tree srctype
, desttype
;
8811 unsigned int src_align
, dest_align
;
8816 src_align
= get_pointer_alignment (src
);
8817 dest_align
= get_pointer_alignment (dest
);
8819 /* Both DEST and SRC must be pointer types.
8820 ??? This is what old code did. Is the testing for pointer types
8823 If either SRC is readonly or length is 1, we can use memcpy. */
8824 if (!dest_align
|| !src_align
)
8826 if (readonly_data_expr (src
)
8827 || (host_integerp (len
, 1)
8828 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8829 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8831 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8834 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8837 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8838 if (TREE_CODE (src
) == ADDR_EXPR
8839 && TREE_CODE (dest
) == ADDR_EXPR
)
8841 tree src_base
, dest_base
, fn
;
8842 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8843 HOST_WIDE_INT size
= -1;
8844 HOST_WIDE_INT maxsize
= -1;
8846 srcvar
= TREE_OPERAND (src
, 0);
8847 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8849 destvar
= TREE_OPERAND (dest
, 0);
8850 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8852 if (host_integerp (len
, 1))
8853 maxsize
= tree_low_cst (len
, 1);
8856 src_offset
/= BITS_PER_UNIT
;
8857 dest_offset
/= BITS_PER_UNIT
;
8858 if (SSA_VAR_P (src_base
)
8859 && SSA_VAR_P (dest_base
))
8861 if (operand_equal_p (src_base
, dest_base
, 0)
8862 && ranges_overlap_p (src_offset
, maxsize
,
8863 dest_offset
, maxsize
))
8866 else if (TREE_CODE (src_base
) == MEM_REF
8867 && TREE_CODE (dest_base
) == MEM_REF
)
8870 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8871 TREE_OPERAND (dest_base
, 0), 0))
8873 off
= double_int_add (mem_ref_offset (src_base
),
8874 shwi_to_double_int (src_offset
));
8875 if (!double_int_fits_in_shwi_p (off
))
8877 src_offset
= off
.low
;
8878 off
= double_int_add (mem_ref_offset (dest_base
),
8879 shwi_to_double_int (dest_offset
));
8880 if (!double_int_fits_in_shwi_p (off
))
8882 dest_offset
= off
.low
;
8883 if (ranges_overlap_p (src_offset
, maxsize
,
8884 dest_offset
, maxsize
))
8890 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8893 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8896 /* If the destination and source do not alias optimize into
8898 if ((is_gimple_min_invariant (dest
)
8899 || TREE_CODE (dest
) == SSA_NAME
)
8900 && (is_gimple_min_invariant (src
)
8901 || TREE_CODE (src
) == SSA_NAME
))
8904 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8905 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8906 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8909 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8912 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8919 if (!host_integerp (len
, 0))
8922 This logic lose for arguments like (type *)malloc (sizeof (type)),
8923 since we strip the casts of up to VOID return value from malloc.
8924 Perhaps we ought to inherit type from non-VOID argument here? */
8927 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8928 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8930 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8931 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8933 tree tem
= TREE_OPERAND (src
, 0);
8935 if (tem
!= TREE_OPERAND (src
, 0))
8936 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8938 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8940 tree tem
= TREE_OPERAND (dest
, 0);
8942 if (tem
!= TREE_OPERAND (dest
, 0))
8943 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8945 srctype
= TREE_TYPE (TREE_TYPE (src
));
8946 if (TREE_CODE (srctype
) == ARRAY_TYPE
8947 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8949 srctype
= TREE_TYPE (srctype
);
8951 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8953 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8954 if (TREE_CODE (desttype
) == ARRAY_TYPE
8955 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8957 desttype
= TREE_TYPE (desttype
);
8959 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8961 if (TREE_ADDRESSABLE (srctype
)
8962 || TREE_ADDRESSABLE (desttype
))
8965 src_align
= get_pointer_alignment (src
);
8966 dest_align
= get_pointer_alignment (dest
);
8967 if (dest_align
< TYPE_ALIGN (desttype
)
8968 || src_align
< TYPE_ALIGN (srctype
))
8972 dest
= builtin_save_expr (dest
);
8974 /* Build accesses at offset zero with a ref-all character type. */
8975 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8976 ptr_mode
, true), 0);
8979 STRIP_NOPS (destvar
);
8980 if (TREE_CODE (destvar
) == ADDR_EXPR
8981 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8982 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8983 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8985 destvar
= NULL_TREE
;
8988 STRIP_NOPS (srcvar
);
8989 if (TREE_CODE (srcvar
) == ADDR_EXPR
8990 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8991 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8994 || src_align
>= TYPE_ALIGN (desttype
))
8995 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8997 else if (!STRICT_ALIGNMENT
)
8999 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
9001 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
9009 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
9012 if (srcvar
== NULL_TREE
)
9015 if (src_align
>= TYPE_ALIGN (desttype
))
9016 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
9019 if (STRICT_ALIGNMENT
)
9021 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
9023 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
9026 else if (destvar
== NULL_TREE
)
9029 if (dest_align
>= TYPE_ALIGN (srctype
))
9030 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
9033 if (STRICT_ALIGNMENT
)
9035 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
9037 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
9041 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
9047 if (endp
== 0 || endp
== 3)
9048 return omit_one_operand_loc (loc
, type
, dest
, expr
);
9054 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
9057 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9058 dest
= fold_convert_loc (loc
, type
, dest
);
9060 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
9064 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9065 If LEN is not NULL, it represents the length of the string to be
9066 copied. Return NULL_TREE if no simplification can be made. */
9069 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
9073 if (!validate_arg (dest
, POINTER_TYPE
)
9074 || !validate_arg (src
, POINTER_TYPE
))
9077 /* If SRC and DEST are the same (and not volatile), return DEST. */
9078 if (operand_equal_p (src
, dest
, 0))
9079 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
9081 if (optimize_function_for_size_p (cfun
))
9084 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9090 len
= c_strlen (src
, 1);
9091 if (! len
|| TREE_SIDE_EFFECTS (len
))
9095 len
= fold_convert_loc (loc
, size_type_node
, len
);
9096 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
9097 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9098 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9101 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9102 Return NULL_TREE if no simplification can be made. */
9105 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
9107 tree fn
, len
, lenp1
, call
, type
;
9109 if (!validate_arg (dest
, POINTER_TYPE
)
9110 || !validate_arg (src
, POINTER_TYPE
))
9113 len
= c_strlen (src
, 1);
9115 || TREE_CODE (len
) != INTEGER_CST
)
9118 if (optimize_function_for_size_p (cfun
)
9119 /* If length is zero it's small enough. */
9120 && !integer_zerop (len
))
9123 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9127 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
9128 fold_convert_loc (loc
, size_type_node
, len
),
9129 build_int_cst (size_type_node
, 1));
9130 /* We use dest twice in building our expression. Save it from
9131 multiple expansions. */
9132 dest
= builtin_save_expr (dest
);
9133 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
9135 type
= TREE_TYPE (TREE_TYPE (fndecl
));
9136 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9137 dest
= fold_convert_loc (loc
, type
, dest
);
9138 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
9142 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9143 If SLEN is not NULL, it represents the length of the source string.
9144 Return NULL_TREE if no simplification can be made. */
9147 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
9148 tree src
, tree len
, tree slen
)
9152 if (!validate_arg (dest
, POINTER_TYPE
)
9153 || !validate_arg (src
, POINTER_TYPE
)
9154 || !validate_arg (len
, INTEGER_TYPE
))
9157 /* If the LEN parameter is zero, return DEST. */
9158 if (integer_zerop (len
))
9159 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
9161 /* We can't compare slen with len as constants below if len is not a
9163 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
9167 slen
= c_strlen (src
, 1);
9169 /* Now, we must be passed a constant src ptr parameter. */
9170 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
9173 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
9175 /* We do not support simplification of this case, though we do
9176 support it when expanding trees into RTL. */
9177 /* FIXME: generate a call to __builtin_memset. */
9178 if (tree_int_cst_lt (slen
, len
))
9181 /* OK transform into builtin memcpy. */
9182 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9186 len
= fold_convert_loc (loc
, size_type_node
, len
);
9187 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9188 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9191 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9192 arguments to the call, and TYPE is its return type.
9193 Return NULL_TREE if no simplification can be made. */
9196 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9198 if (!validate_arg (arg1
, POINTER_TYPE
)
9199 || !validate_arg (arg2
, INTEGER_TYPE
)
9200 || !validate_arg (len
, INTEGER_TYPE
))
9206 if (TREE_CODE (arg2
) != INTEGER_CST
9207 || !host_integerp (len
, 1))
9210 p1
= c_getstr (arg1
);
9211 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9217 if (target_char_cast (arg2
, &c
))
9220 r
= (const char *) memchr (p1
, c
, tree_low_cst (len
, 1));
9223 return build_int_cst (TREE_TYPE (arg1
), 0);
9225 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9226 return fold_convert_loc (loc
, type
, tem
);
9232 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9233 Return NULL_TREE if no simplification can be made. */
9236 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9238 const char *p1
, *p2
;
9240 if (!validate_arg (arg1
, POINTER_TYPE
)
9241 || !validate_arg (arg2
, POINTER_TYPE
)
9242 || !validate_arg (len
, INTEGER_TYPE
))
9245 /* If the LEN parameter is zero, return zero. */
9246 if (integer_zerop (len
))
9247 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9250 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9251 if (operand_equal_p (arg1
, arg2
, 0))
9252 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9254 p1
= c_getstr (arg1
);
9255 p2
= c_getstr (arg2
);
9257 /* If all arguments are constant, and the value of len is not greater
9258 than the lengths of arg1 and arg2, evaluate at compile-time. */
9259 if (host_integerp (len
, 1) && p1
&& p2
9260 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9261 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9263 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9266 return integer_one_node
;
9268 return integer_minus_one_node
;
9270 return integer_zero_node
;
9273 /* If len parameter is one, return an expression corresponding to
9274 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9275 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9277 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9278 tree cst_uchar_ptr_node
9279 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9282 = fold_convert_loc (loc
, integer_type_node
,
9283 build1 (INDIRECT_REF
, cst_uchar_node
,
9284 fold_convert_loc (loc
,
9288 = fold_convert_loc (loc
, integer_type_node
,
9289 build1 (INDIRECT_REF
, cst_uchar_node
,
9290 fold_convert_loc (loc
,
9293 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9299 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9300 Return NULL_TREE if no simplification can be made. */
9303 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9305 const char *p1
, *p2
;
9307 if (!validate_arg (arg1
, POINTER_TYPE
)
9308 || !validate_arg (arg2
, POINTER_TYPE
))
9311 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9312 if (operand_equal_p (arg1
, arg2
, 0))
9313 return integer_zero_node
;
9315 p1
= c_getstr (arg1
);
9316 p2
= c_getstr (arg2
);
9320 const int i
= strcmp (p1
, p2
);
9322 return integer_minus_one_node
;
9324 return integer_one_node
;
9326 return integer_zero_node
;
9329 /* If the second arg is "", return *(const unsigned char*)arg1. */
9330 if (p2
&& *p2
== '\0')
9332 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9333 tree cst_uchar_ptr_node
9334 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9336 return fold_convert_loc (loc
, integer_type_node
,
9337 build1 (INDIRECT_REF
, cst_uchar_node
,
9338 fold_convert_loc (loc
,
9343 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9344 if (p1
&& *p1
== '\0')
9346 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9347 tree cst_uchar_ptr_node
9348 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9351 = fold_convert_loc (loc
, integer_type_node
,
9352 build1 (INDIRECT_REF
, cst_uchar_node
,
9353 fold_convert_loc (loc
,
9356 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9362 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9363 Return NULL_TREE if no simplification can be made. */
9366 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9368 const char *p1
, *p2
;
9370 if (!validate_arg (arg1
, POINTER_TYPE
)
9371 || !validate_arg (arg2
, POINTER_TYPE
)
9372 || !validate_arg (len
, INTEGER_TYPE
))
9375 /* If the LEN parameter is zero, return zero. */
9376 if (integer_zerop (len
))
9377 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9380 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9381 if (operand_equal_p (arg1
, arg2
, 0))
9382 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9384 p1
= c_getstr (arg1
);
9385 p2
= c_getstr (arg2
);
9387 if (host_integerp (len
, 1) && p1
&& p2
)
9389 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9391 return integer_one_node
;
9393 return integer_minus_one_node
;
9395 return integer_zero_node
;
9398 /* If the second arg is "", and the length is greater than zero,
9399 return *(const unsigned char*)arg1. */
9400 if (p2
&& *p2
== '\0'
9401 && TREE_CODE (len
) == INTEGER_CST
9402 && tree_int_cst_sgn (len
) == 1)
9404 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9405 tree cst_uchar_ptr_node
9406 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9408 return fold_convert_loc (loc
, integer_type_node
,
9409 build1 (INDIRECT_REF
, cst_uchar_node
,
9410 fold_convert_loc (loc
,
9415 /* If the first arg is "", and the length is greater than zero,
9416 return -*(const unsigned char*)arg2. */
9417 if (p1
&& *p1
== '\0'
9418 && TREE_CODE (len
) == INTEGER_CST
9419 && tree_int_cst_sgn (len
) == 1)
9421 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9422 tree cst_uchar_ptr_node
9423 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9425 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9426 build1 (INDIRECT_REF
, cst_uchar_node
,
9427 fold_convert_loc (loc
,
9430 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9433 /* If len parameter is one, return an expression corresponding to
9434 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9435 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9437 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9438 tree cst_uchar_ptr_node
9439 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9441 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9442 build1 (INDIRECT_REF
, cst_uchar_node
,
9443 fold_convert_loc (loc
,
9446 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9447 build1 (INDIRECT_REF
, cst_uchar_node
,
9448 fold_convert_loc (loc
,
9451 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9457 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9458 ARG. Return NULL_TREE if no simplification can be made. */
9461 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9463 if (!validate_arg (arg
, REAL_TYPE
))
9466 /* If ARG is a compile-time constant, determine the result. */
9467 if (TREE_CODE (arg
) == REAL_CST
9468 && !TREE_OVERFLOW (arg
))
9472 c
= TREE_REAL_CST (arg
);
9473 return (REAL_VALUE_NEGATIVE (c
)
9474 ? build_one_cst (type
)
9475 : build_zero_cst (type
));
9478 /* If ARG is non-negative, the result is always zero. */
9479 if (tree_expr_nonnegative_p (arg
))
9480 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9482 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9484 return fold_convert (type
,
9485 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9486 build_real (TREE_TYPE (arg
), dconst0
)));
9491 /* Fold function call to builtin copysign, copysignf or copysignl with
9492 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9496 fold_builtin_copysign (location_t loc
, tree fndecl
,
9497 tree arg1
, tree arg2
, tree type
)
9501 if (!validate_arg (arg1
, REAL_TYPE
)
9502 || !validate_arg (arg2
, REAL_TYPE
))
9505 /* copysign(X,X) is X. */
9506 if (operand_equal_p (arg1
, arg2
, 0))
9507 return fold_convert_loc (loc
, type
, arg1
);
9509 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9510 if (TREE_CODE (arg1
) == REAL_CST
9511 && TREE_CODE (arg2
) == REAL_CST
9512 && !TREE_OVERFLOW (arg1
)
9513 && !TREE_OVERFLOW (arg2
))
9515 REAL_VALUE_TYPE c1
, c2
;
9517 c1
= TREE_REAL_CST (arg1
);
9518 c2
= TREE_REAL_CST (arg2
);
9519 /* c1.sign := c2.sign. */
9520 real_copysign (&c1
, &c2
);
9521 return build_real (type
, c1
);
9524 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9525 Remember to evaluate Y for side-effects. */
9526 if (tree_expr_nonnegative_p (arg2
))
9527 return omit_one_operand_loc (loc
, type
,
9528 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9531 /* Strip sign changing operations for the first argument. */
9532 tem
= fold_strip_sign_ops (arg1
);
9534 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9539 /* Fold a call to builtin isascii with argument ARG. */
9542 fold_builtin_isascii (location_t loc
, tree arg
)
9544 if (!validate_arg (arg
, INTEGER_TYPE
))
9548 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9549 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9550 build_int_cst (integer_type_node
,
9551 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9552 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9553 arg
, integer_zero_node
);
9557 /* Fold a call to builtin toascii with argument ARG. */
9560 fold_builtin_toascii (location_t loc
, tree arg
)
9562 if (!validate_arg (arg
, INTEGER_TYPE
))
9565 /* Transform toascii(c) -> (c & 0x7f). */
9566 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9567 build_int_cst (integer_type_node
, 0x7f));
9570 /* Fold a call to builtin isdigit with argument ARG. */
9573 fold_builtin_isdigit (location_t loc
, tree arg
)
9575 if (!validate_arg (arg
, INTEGER_TYPE
))
9579 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9580 /* According to the C standard, isdigit is unaffected by locale.
9581 However, it definitely is affected by the target character set. */
9582 unsigned HOST_WIDE_INT target_digit0
9583 = lang_hooks
.to_target_charset ('0');
9585 if (target_digit0
== 0)
9588 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9589 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9590 build_int_cst (unsigned_type_node
, target_digit0
));
9591 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9592 build_int_cst (unsigned_type_node
, 9));
9596 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9599 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9601 if (!validate_arg (arg
, REAL_TYPE
))
9604 arg
= fold_convert_loc (loc
, type
, arg
);
9605 if (TREE_CODE (arg
) == REAL_CST
)
9606 return fold_abs_const (arg
, type
);
9607 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9610 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9613 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9615 if (!validate_arg (arg
, INTEGER_TYPE
))
9618 arg
= fold_convert_loc (loc
, type
, arg
);
9619 if (TREE_CODE (arg
) == INTEGER_CST
)
9620 return fold_abs_const (arg
, type
);
9621 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9624 /* Fold a fma operation with arguments ARG[012]. */
9627 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9628 tree type
, tree arg0
, tree arg1
, tree arg2
)
9630 if (TREE_CODE (arg0
) == REAL_CST
9631 && TREE_CODE (arg1
) == REAL_CST
9632 && TREE_CODE (arg2
) == REAL_CST
)
9633 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9638 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9641 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9643 if (validate_arg (arg0
, REAL_TYPE
)
9644 && validate_arg(arg1
, REAL_TYPE
)
9645 && validate_arg(arg2
, REAL_TYPE
))
9647 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9651 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9652 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9653 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9658 /* Fold a call to builtin fmin or fmax. */
9661 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9662 tree type
, bool max
)
9664 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9666 /* Calculate the result when the argument is a constant. */
9667 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9672 /* If either argument is NaN, return the other one. Avoid the
9673 transformation if we get (and honor) a signalling NaN. Using
9674 omit_one_operand() ensures we create a non-lvalue. */
9675 if (TREE_CODE (arg0
) == REAL_CST
9676 && real_isnan (&TREE_REAL_CST (arg0
))
9677 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9678 || ! TREE_REAL_CST (arg0
).signalling
))
9679 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9680 if (TREE_CODE (arg1
) == REAL_CST
9681 && real_isnan (&TREE_REAL_CST (arg1
))
9682 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9683 || ! TREE_REAL_CST (arg1
).signalling
))
9684 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9686 /* Transform fmin/fmax(x,x) -> x. */
9687 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9688 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9690 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9691 functions to return the numeric arg if the other one is NaN.
9692 These tree codes don't honor that, so only transform if
9693 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9694 handled, so we don't have to worry about it either. */
9695 if (flag_finite_math_only
)
9696 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9697 fold_convert_loc (loc
, type
, arg0
),
9698 fold_convert_loc (loc
, type
, arg1
));
9703 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9706 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9708 if (validate_arg (arg
, COMPLEX_TYPE
)
9709 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9711 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9715 tree new_arg
= builtin_save_expr (arg
);
9716 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9717 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9718 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9725 /* Fold a call to builtin logb/ilogb. */
9728 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9730 if (! validate_arg (arg
, REAL_TYPE
))
9735 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9737 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9743 /* If arg is Inf or NaN and we're logb, return it. */
9744 if (TREE_CODE (rettype
) == REAL_TYPE
)
9745 return fold_convert_loc (loc
, rettype
, arg
);
9746 /* Fall through... */
9748 /* Zero may set errno and/or raise an exception for logb, also
9749 for ilogb we don't know FP_ILOGB0. */
9752 /* For normal numbers, proceed iff radix == 2. In GCC,
9753 normalized significands are in the range [0.5, 1.0). We
9754 want the exponent as if they were [1.0, 2.0) so get the
9755 exponent and subtract 1. */
9756 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9757 return fold_convert_loc (loc
, rettype
,
9758 build_int_cst (integer_type_node
,
9759 REAL_EXP (value
)-1));
9767 /* Fold a call to builtin significand, if radix == 2. */
9770 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9772 if (! validate_arg (arg
, REAL_TYPE
))
9777 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9779 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9786 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9787 return fold_convert_loc (loc
, rettype
, arg
);
9789 /* For normal numbers, proceed iff radix == 2. */
9790 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9792 REAL_VALUE_TYPE result
= *value
;
9793 /* In GCC, normalized significands are in the range [0.5,
9794 1.0). We want them to be [1.0, 2.0) so set the
9796 SET_REAL_EXP (&result
, 1);
9797 return build_real (rettype
, result
);
9806 /* Fold a call to builtin frexp, we can assume the base is 2. */
9809 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9811 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9816 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9819 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9821 /* Proceed if a valid pointer type was passed in. */
9822 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9824 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9830 /* For +-0, return (*exp = 0, +-0). */
9831 exp
= integer_zero_node
;
9836 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9837 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9840 /* Since the frexp function always expects base 2, and in
9841 GCC normalized significands are already in the range
9842 [0.5, 1.0), we have exactly what frexp wants. */
9843 REAL_VALUE_TYPE frac_rvt
= *value
;
9844 SET_REAL_EXP (&frac_rvt
, 0);
9845 frac
= build_real (rettype
, frac_rvt
);
9846 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9853 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9854 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9855 TREE_SIDE_EFFECTS (arg1
) = 1;
9856 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9862 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9863 then we can assume the base is two. If it's false, then we have to
9864 check the mode of the TYPE parameter in certain cases. */
9867 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9868 tree type
, bool ldexp
)
9870 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9875 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9876 if (real_zerop (arg0
) || integer_zerop (arg1
)
9877 || (TREE_CODE (arg0
) == REAL_CST
9878 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9879 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9881 /* If both arguments are constant, then try to evaluate it. */
9882 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9883 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9884 && host_integerp (arg1
, 0))
9886 /* Bound the maximum adjustment to twice the range of the
9887 mode's valid exponents. Use abs to ensure the range is
9888 positive as a sanity check. */
9889 const long max_exp_adj
= 2 *
9890 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9891 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9893 /* Get the user-requested adjustment. */
9894 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9896 /* The requested adjustment must be inside this range. This
9897 is a preliminary cap to avoid things like overflow, we
9898 may still fail to compute the result for other reasons. */
9899 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9901 REAL_VALUE_TYPE initial_result
;
9903 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9905 /* Ensure we didn't overflow. */
9906 if (! real_isinf (&initial_result
))
9908 const REAL_VALUE_TYPE trunc_result
9909 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9911 /* Only proceed if the target mode can hold the
9913 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9914 return build_real (type
, trunc_result
);
9923 /* Fold a call to builtin modf. */
9926 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9928 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9933 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9936 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9938 /* Proceed if a valid pointer type was passed in. */
9939 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9941 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9942 REAL_VALUE_TYPE trunc
, frac
;
9948 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9949 trunc
= frac
= *value
;
9952 /* For +-Inf, return (*arg1 = arg0, +-0). */
9954 frac
.sign
= value
->sign
;
9958 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9959 real_trunc (&trunc
, VOIDmode
, value
);
9960 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9961 /* If the original number was negative and already
9962 integral, then the fractional part is -0.0. */
9963 if (value
->sign
&& frac
.cl
== rvc_zero
)
9964 frac
.sign
= value
->sign
;
9968 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9969 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9970 build_real (rettype
, trunc
));
9971 TREE_SIDE_EFFECTS (arg1
) = 1;
9972 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9973 build_real (rettype
, frac
));
9979 /* Given a location LOC, an interclass builtin function decl FNDECL
9980 and its single argument ARG, return an folded expression computing
9981 the same, or NULL_TREE if we either couldn't or didn't want to fold
9982 (the latter happen if there's an RTL instruction available). */
9985 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9987 enum machine_mode mode
;
9989 if (!validate_arg (arg
, REAL_TYPE
))
9992 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9995 mode
= TYPE_MODE (TREE_TYPE (arg
));
9997 /* If there is no optab, try generic code. */
9998 switch (DECL_FUNCTION_CODE (fndecl
))
10002 CASE_FLT_FN (BUILT_IN_ISINF
):
10004 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10005 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
10006 tree
const type
= TREE_TYPE (arg
);
10010 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10011 real_from_string (&r
, buf
);
10012 result
= build_call_expr (isgr_fn
, 2,
10013 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
10014 build_real (type
, r
));
10017 CASE_FLT_FN (BUILT_IN_FINITE
):
10018 case BUILT_IN_ISFINITE
:
10020 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10021 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
10022 tree
const type
= TREE_TYPE (arg
);
10026 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10027 real_from_string (&r
, buf
);
10028 result
= build_call_expr (isle_fn
, 2,
10029 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
10030 build_real (type
, r
));
10031 /*result = fold_build2_loc (loc, UNGT_EXPR,
10032 TREE_TYPE (TREE_TYPE (fndecl)),
10033 fold_build1_loc (loc, ABS_EXPR, type, arg),
10034 build_real (type, r));
10035 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10036 TREE_TYPE (TREE_TYPE (fndecl)),
10040 case BUILT_IN_ISNORMAL
:
10042 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10043 islessequal(fabs(x),DBL_MAX). */
10044 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
10045 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
10046 tree
const type
= TREE_TYPE (arg
);
10047 REAL_VALUE_TYPE rmax
, rmin
;
10050 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10051 real_from_string (&rmax
, buf
);
10052 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10053 real_from_string (&rmin
, buf
);
10054 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10055 result
= build_call_expr (isle_fn
, 2, arg
,
10056 build_real (type
, rmax
));
10057 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
10058 build_call_expr (isge_fn
, 2, arg
,
10059 build_real (type
, rmin
)));
10069 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10070 ARG is the argument for the call. */
10073 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
10075 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10078 if (!validate_arg (arg
, REAL_TYPE
))
10081 switch (builtin_index
)
10083 case BUILT_IN_ISINF
:
10084 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10085 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10087 if (TREE_CODE (arg
) == REAL_CST
)
10089 r
= TREE_REAL_CST (arg
);
10090 if (real_isinf (&r
))
10091 return real_compare (GT_EXPR
, &r
, &dconst0
)
10092 ? integer_one_node
: integer_minus_one_node
;
10094 return integer_zero_node
;
10099 case BUILT_IN_ISINF_SIGN
:
10101 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10102 /* In a boolean context, GCC will fold the inner COND_EXPR to
10103 1. So e.g. "if (isinf_sign(x))" would be folded to just
10104 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10105 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
10106 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
10107 tree tmp
= NULL_TREE
;
10109 arg
= builtin_save_expr (arg
);
10111 if (signbit_fn
&& isinf_fn
)
10113 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
10114 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
10116 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10117 signbit_call
, integer_zero_node
);
10118 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10119 isinf_call
, integer_zero_node
);
10121 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
10122 integer_minus_one_node
, integer_one_node
);
10123 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10125 integer_zero_node
);
10131 case BUILT_IN_ISFINITE
:
10132 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
10133 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10134 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
10136 if (TREE_CODE (arg
) == REAL_CST
)
10138 r
= TREE_REAL_CST (arg
);
10139 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
10144 case BUILT_IN_ISNAN
:
10145 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
10146 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10148 if (TREE_CODE (arg
) == REAL_CST
)
10150 r
= TREE_REAL_CST (arg
);
10151 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
10154 arg
= builtin_save_expr (arg
);
10155 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
10158 gcc_unreachable ();
10162 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10163 This builtin will generate code to return the appropriate floating
10164 point classification depending on the value of the floating point
10165 number passed in. The possible return values must be supplied as
10166 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10167 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10168 one floating point argument which is "type generic". */
10171 fold_builtin_fpclassify (location_t loc
, tree exp
)
10173 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
10174 arg
, type
, res
, tmp
;
10175 enum machine_mode mode
;
10179 /* Verify the required arguments in the original call. */
10180 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10181 INTEGER_TYPE
, INTEGER_TYPE
,
10182 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
10185 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10186 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10187 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10188 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10189 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10190 arg
= CALL_EXPR_ARG (exp
, 5);
10191 type
= TREE_TYPE (arg
);
10192 mode
= TYPE_MODE (type
);
10193 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10195 /* fpclassify(x) ->
10196 isnan(x) ? FP_NAN :
10197 (fabs(x) == Inf ? FP_INFINITE :
10198 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10199 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10201 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10202 build_real (type
, dconst0
));
10203 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10204 tmp
, fp_zero
, fp_subnormal
);
10206 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10207 real_from_string (&r
, buf
);
10208 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10209 arg
, build_real (type
, r
));
10210 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
10212 if (HONOR_INFINITIES (mode
))
10215 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10216 build_real (type
, r
));
10217 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
10221 if (HONOR_NANS (mode
))
10223 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10224 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10230 /* Fold a call to an unordered comparison function such as
10231 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10232 being called and ARG0 and ARG1 are the arguments for the call.
10233 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10234 the opposite of the desired result. UNORDERED_CODE is used
10235 for modes that can hold NaNs and ORDERED_CODE is used for
10239 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10240 enum tree_code unordered_code
,
10241 enum tree_code ordered_code
)
10243 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10244 enum tree_code code
;
10246 enum tree_code code0
, code1
;
10247 tree cmp_type
= NULL_TREE
;
10249 type0
= TREE_TYPE (arg0
);
10250 type1
= TREE_TYPE (arg1
);
10252 code0
= TREE_CODE (type0
);
10253 code1
= TREE_CODE (type1
);
10255 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10256 /* Choose the wider of two real types. */
10257 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10259 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10261 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10264 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10265 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10267 if (unordered_code
== UNORDERED_EXPR
)
10269 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10270 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10271 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10274 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10276 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10277 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10280 /* Fold a call to built-in function FNDECL with 0 arguments.
10281 IGNORE is true if the result of the function call is ignored. This
10282 function returns NULL_TREE if no simplification was possible. */
10285 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10287 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10288 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10291 CASE_FLT_FN (BUILT_IN_INF
):
10292 case BUILT_IN_INFD32
:
10293 case BUILT_IN_INFD64
:
10294 case BUILT_IN_INFD128
:
10295 return fold_builtin_inf (loc
, type
, true);
10297 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10298 return fold_builtin_inf (loc
, type
, false);
10300 case BUILT_IN_CLASSIFY_TYPE
:
10301 return fold_builtin_classify_type (NULL_TREE
);
10309 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10310 IGNORE is true if the result of the function call is ignored. This
10311 function returns NULL_TREE if no simplification was possible. */
10314 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10316 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10317 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10320 case BUILT_IN_CONSTANT_P
:
10322 tree val
= fold_builtin_constant_p (arg0
);
10324 /* Gimplification will pull the CALL_EXPR for the builtin out of
10325 an if condition. When not optimizing, we'll not CSE it back.
10326 To avoid link error types of regressions, return false now. */
10327 if (!val
&& !optimize
)
10328 val
= integer_zero_node
;
10333 case BUILT_IN_CLASSIFY_TYPE
:
10334 return fold_builtin_classify_type (arg0
);
10336 case BUILT_IN_STRLEN
:
10337 return fold_builtin_strlen (loc
, type
, arg0
);
10339 CASE_FLT_FN (BUILT_IN_FABS
):
10340 return fold_builtin_fabs (loc
, arg0
, type
);
10343 case BUILT_IN_LABS
:
10344 case BUILT_IN_LLABS
:
10345 case BUILT_IN_IMAXABS
:
10346 return fold_builtin_abs (loc
, arg0
, type
);
10348 CASE_FLT_FN (BUILT_IN_CONJ
):
10349 if (validate_arg (arg0
, COMPLEX_TYPE
)
10350 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10351 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10354 CASE_FLT_FN (BUILT_IN_CREAL
):
10355 if (validate_arg (arg0
, COMPLEX_TYPE
)
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10357 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10360 CASE_FLT_FN (BUILT_IN_CIMAG
):
10361 if (validate_arg (arg0
, COMPLEX_TYPE
)
10362 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10363 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10366 CASE_FLT_FN (BUILT_IN_CCOS
):
10367 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10369 CASE_FLT_FN (BUILT_IN_CCOSH
):
10370 return fold_builtin_ccos(loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10372 CASE_FLT_FN (BUILT_IN_CPROJ
):
10373 return fold_builtin_cproj(loc
, arg0
, type
);
10375 CASE_FLT_FN (BUILT_IN_CSIN
):
10376 if (validate_arg (arg0
, COMPLEX_TYPE
)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10378 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10381 CASE_FLT_FN (BUILT_IN_CSINH
):
10382 if (validate_arg (arg0
, COMPLEX_TYPE
)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10384 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10387 CASE_FLT_FN (BUILT_IN_CTAN
):
10388 if (validate_arg (arg0
, COMPLEX_TYPE
)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10390 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10393 CASE_FLT_FN (BUILT_IN_CTANH
):
10394 if (validate_arg (arg0
, COMPLEX_TYPE
)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10396 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10399 CASE_FLT_FN (BUILT_IN_CLOG
):
10400 if (validate_arg (arg0
, COMPLEX_TYPE
)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10402 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10405 CASE_FLT_FN (BUILT_IN_CSQRT
):
10406 if (validate_arg (arg0
, COMPLEX_TYPE
)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10408 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10411 CASE_FLT_FN (BUILT_IN_CASIN
):
10412 if (validate_arg (arg0
, COMPLEX_TYPE
)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10414 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10417 CASE_FLT_FN (BUILT_IN_CACOS
):
10418 if (validate_arg (arg0
, COMPLEX_TYPE
)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10420 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10423 CASE_FLT_FN (BUILT_IN_CATAN
):
10424 if (validate_arg (arg0
, COMPLEX_TYPE
)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10426 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10429 CASE_FLT_FN (BUILT_IN_CASINH
):
10430 if (validate_arg (arg0
, COMPLEX_TYPE
)
10431 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10432 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10435 CASE_FLT_FN (BUILT_IN_CACOSH
):
10436 if (validate_arg (arg0
, COMPLEX_TYPE
)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10438 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10441 CASE_FLT_FN (BUILT_IN_CATANH
):
10442 if (validate_arg (arg0
, COMPLEX_TYPE
)
10443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10444 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10447 CASE_FLT_FN (BUILT_IN_CABS
):
10448 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10450 CASE_FLT_FN (BUILT_IN_CARG
):
10451 return fold_builtin_carg (loc
, arg0
, type
);
10453 CASE_FLT_FN (BUILT_IN_SQRT
):
10454 return fold_builtin_sqrt (loc
, arg0
, type
);
10456 CASE_FLT_FN (BUILT_IN_CBRT
):
10457 return fold_builtin_cbrt (loc
, arg0
, type
);
10459 CASE_FLT_FN (BUILT_IN_ASIN
):
10460 if (validate_arg (arg0
, REAL_TYPE
))
10461 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10462 &dconstm1
, &dconst1
, true);
10465 CASE_FLT_FN (BUILT_IN_ACOS
):
10466 if (validate_arg (arg0
, REAL_TYPE
))
10467 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10468 &dconstm1
, &dconst1
, true);
10471 CASE_FLT_FN (BUILT_IN_ATAN
):
10472 if (validate_arg (arg0
, REAL_TYPE
))
10473 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10476 CASE_FLT_FN (BUILT_IN_ASINH
):
10477 if (validate_arg (arg0
, REAL_TYPE
))
10478 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10481 CASE_FLT_FN (BUILT_IN_ACOSH
):
10482 if (validate_arg (arg0
, REAL_TYPE
))
10483 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10484 &dconst1
, NULL
, true);
10487 CASE_FLT_FN (BUILT_IN_ATANH
):
10488 if (validate_arg (arg0
, REAL_TYPE
))
10489 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10490 &dconstm1
, &dconst1
, false);
10493 CASE_FLT_FN (BUILT_IN_SIN
):
10494 if (validate_arg (arg0
, REAL_TYPE
))
10495 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10498 CASE_FLT_FN (BUILT_IN_COS
):
10499 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10501 CASE_FLT_FN (BUILT_IN_TAN
):
10502 return fold_builtin_tan (arg0
, type
);
10504 CASE_FLT_FN (BUILT_IN_CEXP
):
10505 return fold_builtin_cexp (loc
, arg0
, type
);
10507 CASE_FLT_FN (BUILT_IN_CEXPI
):
10508 if (validate_arg (arg0
, REAL_TYPE
))
10509 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10512 CASE_FLT_FN (BUILT_IN_SINH
):
10513 if (validate_arg (arg0
, REAL_TYPE
))
10514 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10517 CASE_FLT_FN (BUILT_IN_COSH
):
10518 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10520 CASE_FLT_FN (BUILT_IN_TANH
):
10521 if (validate_arg (arg0
, REAL_TYPE
))
10522 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10525 CASE_FLT_FN (BUILT_IN_ERF
):
10526 if (validate_arg (arg0
, REAL_TYPE
))
10527 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10530 CASE_FLT_FN (BUILT_IN_ERFC
):
10531 if (validate_arg (arg0
, REAL_TYPE
))
10532 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10535 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10536 if (validate_arg (arg0
, REAL_TYPE
))
10537 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10540 CASE_FLT_FN (BUILT_IN_EXP
):
10541 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10543 CASE_FLT_FN (BUILT_IN_EXP2
):
10544 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10546 CASE_FLT_FN (BUILT_IN_EXP10
):
10547 CASE_FLT_FN (BUILT_IN_POW10
):
10548 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10550 CASE_FLT_FN (BUILT_IN_EXPM1
):
10551 if (validate_arg (arg0
, REAL_TYPE
))
10552 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10555 CASE_FLT_FN (BUILT_IN_LOG
):
10556 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10558 CASE_FLT_FN (BUILT_IN_LOG2
):
10559 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10561 CASE_FLT_FN (BUILT_IN_LOG10
):
10562 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10564 CASE_FLT_FN (BUILT_IN_LOG1P
):
10565 if (validate_arg (arg0
, REAL_TYPE
))
10566 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10567 &dconstm1
, NULL
, false);
10570 CASE_FLT_FN (BUILT_IN_J0
):
10571 if (validate_arg (arg0
, REAL_TYPE
))
10572 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10576 CASE_FLT_FN (BUILT_IN_J1
):
10577 if (validate_arg (arg0
, REAL_TYPE
))
10578 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10582 CASE_FLT_FN (BUILT_IN_Y0
):
10583 if (validate_arg (arg0
, REAL_TYPE
))
10584 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10585 &dconst0
, NULL
, false);
10588 CASE_FLT_FN (BUILT_IN_Y1
):
10589 if (validate_arg (arg0
, REAL_TYPE
))
10590 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10591 &dconst0
, NULL
, false);
10594 CASE_FLT_FN (BUILT_IN_NAN
):
10595 case BUILT_IN_NAND32
:
10596 case BUILT_IN_NAND64
:
10597 case BUILT_IN_NAND128
:
10598 return fold_builtin_nan (arg0
, type
, true);
10600 CASE_FLT_FN (BUILT_IN_NANS
):
10601 return fold_builtin_nan (arg0
, type
, false);
10603 CASE_FLT_FN (BUILT_IN_FLOOR
):
10604 return fold_builtin_floor (loc
, fndecl
, arg0
);
10606 CASE_FLT_FN (BUILT_IN_CEIL
):
10607 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10609 CASE_FLT_FN (BUILT_IN_TRUNC
):
10610 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10612 CASE_FLT_FN (BUILT_IN_ROUND
):
10613 return fold_builtin_round (loc
, fndecl
, arg0
);
10615 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10616 CASE_FLT_FN (BUILT_IN_RINT
):
10617 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10619 CASE_FLT_FN (BUILT_IN_ICEIL
):
10620 CASE_FLT_FN (BUILT_IN_LCEIL
):
10621 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10622 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10623 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10624 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10625 CASE_FLT_FN (BUILT_IN_IROUND
):
10626 CASE_FLT_FN (BUILT_IN_LROUND
):
10627 CASE_FLT_FN (BUILT_IN_LLROUND
):
10628 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10630 CASE_FLT_FN (BUILT_IN_IRINT
):
10631 CASE_FLT_FN (BUILT_IN_LRINT
):
10632 CASE_FLT_FN (BUILT_IN_LLRINT
):
10633 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10635 case BUILT_IN_BSWAP16
:
10636 case BUILT_IN_BSWAP32
:
10637 case BUILT_IN_BSWAP64
:
10638 return fold_builtin_bswap (fndecl
, arg0
);
10640 CASE_INT_FN (BUILT_IN_FFS
):
10641 CASE_INT_FN (BUILT_IN_CLZ
):
10642 CASE_INT_FN (BUILT_IN_CTZ
):
10643 CASE_INT_FN (BUILT_IN_CLRSB
):
10644 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10645 CASE_INT_FN (BUILT_IN_PARITY
):
10646 return fold_builtin_bitop (fndecl
, arg0
);
10648 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10649 return fold_builtin_signbit (loc
, arg0
, type
);
10651 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10652 return fold_builtin_significand (loc
, arg0
, type
);
10654 CASE_FLT_FN (BUILT_IN_ILOGB
):
10655 CASE_FLT_FN (BUILT_IN_LOGB
):
10656 return fold_builtin_logb (loc
, arg0
, type
);
10658 case BUILT_IN_ISASCII
:
10659 return fold_builtin_isascii (loc
, arg0
);
10661 case BUILT_IN_TOASCII
:
10662 return fold_builtin_toascii (loc
, arg0
);
10664 case BUILT_IN_ISDIGIT
:
10665 return fold_builtin_isdigit (loc
, arg0
);
10667 CASE_FLT_FN (BUILT_IN_FINITE
):
10668 case BUILT_IN_FINITED32
:
10669 case BUILT_IN_FINITED64
:
10670 case BUILT_IN_FINITED128
:
10671 case BUILT_IN_ISFINITE
:
10673 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10676 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10679 CASE_FLT_FN (BUILT_IN_ISINF
):
10680 case BUILT_IN_ISINFD32
:
10681 case BUILT_IN_ISINFD64
:
10682 case BUILT_IN_ISINFD128
:
10684 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10687 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10690 case BUILT_IN_ISNORMAL
:
10691 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10693 case BUILT_IN_ISINF_SIGN
:
10694 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10696 CASE_FLT_FN (BUILT_IN_ISNAN
):
10697 case BUILT_IN_ISNAND32
:
10698 case BUILT_IN_ISNAND64
:
10699 case BUILT_IN_ISNAND128
:
10700 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10702 case BUILT_IN_PRINTF
:
10703 case BUILT_IN_PRINTF_UNLOCKED
:
10704 case BUILT_IN_VPRINTF
:
10705 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10707 case BUILT_IN_FREE
:
10708 if (integer_zerop (arg0
))
10709 return build_empty_stmt (loc
);
10720 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10721 IGNORE is true if the result of the function call is ignored. This
10722 function returns NULL_TREE if no simplification was possible. */
10725 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10727 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10728 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10732 CASE_FLT_FN (BUILT_IN_JN
):
10733 if (validate_arg (arg0
, INTEGER_TYPE
)
10734 && validate_arg (arg1
, REAL_TYPE
))
10735 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10738 CASE_FLT_FN (BUILT_IN_YN
):
10739 if (validate_arg (arg0
, INTEGER_TYPE
)
10740 && validate_arg (arg1
, REAL_TYPE
))
10741 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10745 CASE_FLT_FN (BUILT_IN_DREM
):
10746 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10747 if (validate_arg (arg0
, REAL_TYPE
)
10748 && validate_arg(arg1
, REAL_TYPE
))
10749 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10752 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10753 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10754 if (validate_arg (arg0
, REAL_TYPE
)
10755 && validate_arg(arg1
, POINTER_TYPE
))
10756 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10759 CASE_FLT_FN (BUILT_IN_ATAN2
):
10760 if (validate_arg (arg0
, REAL_TYPE
)
10761 && validate_arg(arg1
, REAL_TYPE
))
10762 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10765 CASE_FLT_FN (BUILT_IN_FDIM
):
10766 if (validate_arg (arg0
, REAL_TYPE
)
10767 && validate_arg(arg1
, REAL_TYPE
))
10768 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10771 CASE_FLT_FN (BUILT_IN_HYPOT
):
10772 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10774 CASE_FLT_FN (BUILT_IN_CPOW
):
10775 if (validate_arg (arg0
, COMPLEX_TYPE
)
10776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10777 && validate_arg (arg1
, COMPLEX_TYPE
)
10778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10779 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10782 CASE_FLT_FN (BUILT_IN_LDEXP
):
10783 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10784 CASE_FLT_FN (BUILT_IN_SCALBN
):
10785 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10786 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10787 type
, /*ldexp=*/false);
10789 CASE_FLT_FN (BUILT_IN_FREXP
):
10790 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10792 CASE_FLT_FN (BUILT_IN_MODF
):
10793 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10795 case BUILT_IN_BZERO
:
10796 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10798 case BUILT_IN_FPUTS
:
10799 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10801 case BUILT_IN_FPUTS_UNLOCKED
:
10802 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10804 case BUILT_IN_STRSTR
:
10805 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10807 case BUILT_IN_STRCAT
:
10808 return fold_builtin_strcat (loc
, arg0
, arg1
);
10810 case BUILT_IN_STRSPN
:
10811 return fold_builtin_strspn (loc
, arg0
, arg1
);
10813 case BUILT_IN_STRCSPN
:
10814 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10816 case BUILT_IN_STRCHR
:
10817 case BUILT_IN_INDEX
:
10818 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10820 case BUILT_IN_STRRCHR
:
10821 case BUILT_IN_RINDEX
:
10822 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10824 case BUILT_IN_STRCPY
:
10825 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10827 case BUILT_IN_STPCPY
:
10830 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10834 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10837 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10840 case BUILT_IN_STRCMP
:
10841 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10843 case BUILT_IN_STRPBRK
:
10844 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10846 case BUILT_IN_EXPECT
:
10847 return fold_builtin_expect (loc
, arg0
, arg1
);
10849 CASE_FLT_FN (BUILT_IN_POW
):
10850 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10852 CASE_FLT_FN (BUILT_IN_POWI
):
10853 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10855 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10856 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10858 CASE_FLT_FN (BUILT_IN_FMIN
):
10859 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10861 CASE_FLT_FN (BUILT_IN_FMAX
):
10862 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10864 case BUILT_IN_ISGREATER
:
10865 return fold_builtin_unordered_cmp (loc
, fndecl
,
10866 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10867 case BUILT_IN_ISGREATEREQUAL
:
10868 return fold_builtin_unordered_cmp (loc
, fndecl
,
10869 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10870 case BUILT_IN_ISLESS
:
10871 return fold_builtin_unordered_cmp (loc
, fndecl
,
10872 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10873 case BUILT_IN_ISLESSEQUAL
:
10874 return fold_builtin_unordered_cmp (loc
, fndecl
,
10875 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10876 case BUILT_IN_ISLESSGREATER
:
10877 return fold_builtin_unordered_cmp (loc
, fndecl
,
10878 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10879 case BUILT_IN_ISUNORDERED
:
10880 return fold_builtin_unordered_cmp (loc
, fndecl
,
10881 arg0
, arg1
, UNORDERED_EXPR
,
10884 /* We do the folding for va_start in the expander. */
10885 case BUILT_IN_VA_START
:
10888 case BUILT_IN_SPRINTF
:
10889 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10891 case BUILT_IN_OBJECT_SIZE
:
10892 return fold_builtin_object_size (arg0
, arg1
);
10894 case BUILT_IN_PRINTF
:
10895 case BUILT_IN_PRINTF_UNLOCKED
:
10896 case BUILT_IN_VPRINTF
:
10897 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10899 case BUILT_IN_PRINTF_CHK
:
10900 case BUILT_IN_VPRINTF_CHK
:
10901 if (!validate_arg (arg0
, INTEGER_TYPE
)
10902 || TREE_SIDE_EFFECTS (arg0
))
10905 return fold_builtin_printf (loc
, fndecl
,
10906 arg1
, NULL_TREE
, ignore
, fcode
);
10909 case BUILT_IN_FPRINTF
:
10910 case BUILT_IN_FPRINTF_UNLOCKED
:
10911 case BUILT_IN_VFPRINTF
:
10912 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10915 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10916 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10918 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10919 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10927 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10928 and ARG2. IGNORE is true if the result of the function call is ignored.
10929 This function returns NULL_TREE if no simplification was possible. */
10932 fold_builtin_3 (location_t loc
, tree fndecl
,
10933 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10935 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10936 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10940 CASE_FLT_FN (BUILT_IN_SINCOS
):
10941 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10943 CASE_FLT_FN (BUILT_IN_FMA
):
10944 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10947 CASE_FLT_FN (BUILT_IN_REMQUO
):
10948 if (validate_arg (arg0
, REAL_TYPE
)
10949 && validate_arg(arg1
, REAL_TYPE
)
10950 && validate_arg(arg2
, POINTER_TYPE
))
10951 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10954 case BUILT_IN_MEMSET
:
10955 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10957 case BUILT_IN_BCOPY
:
10958 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10959 void_type_node
, true, /*endp=*/3);
10961 case BUILT_IN_MEMCPY
:
10962 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10963 type
, ignore
, /*endp=*/0);
10965 case BUILT_IN_MEMPCPY
:
10966 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10967 type
, ignore
, /*endp=*/1);
10969 case BUILT_IN_MEMMOVE
:
10970 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10971 type
, ignore
, /*endp=*/3);
10973 case BUILT_IN_STRNCAT
:
10974 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10976 case BUILT_IN_STRNCPY
:
10977 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10979 case BUILT_IN_STRNCMP
:
10980 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10982 case BUILT_IN_MEMCHR
:
10983 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10985 case BUILT_IN_BCMP
:
10986 case BUILT_IN_MEMCMP
:
10987 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10989 case BUILT_IN_SPRINTF
:
10990 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10992 case BUILT_IN_SNPRINTF
:
10993 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
10995 case BUILT_IN_STRCPY_CHK
:
10996 case BUILT_IN_STPCPY_CHK
:
10997 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
11000 case BUILT_IN_STRCAT_CHK
:
11001 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
11003 case BUILT_IN_PRINTF_CHK
:
11004 case BUILT_IN_VPRINTF_CHK
:
11005 if (!validate_arg (arg0
, INTEGER_TYPE
)
11006 || TREE_SIDE_EFFECTS (arg0
))
11009 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
11012 case BUILT_IN_FPRINTF
:
11013 case BUILT_IN_FPRINTF_UNLOCKED
:
11014 case BUILT_IN_VFPRINTF
:
11015 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
11018 case BUILT_IN_FPRINTF_CHK
:
11019 case BUILT_IN_VFPRINTF_CHK
:
11020 if (!validate_arg (arg1
, INTEGER_TYPE
)
11021 || TREE_SIDE_EFFECTS (arg1
))
11024 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
11033 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11034 ARG2, and ARG3. IGNORE is true if the result of the function call is
11035 ignored. This function returns NULL_TREE if no simplification was
11039 fold_builtin_4 (location_t loc
, tree fndecl
,
11040 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
11042 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11046 case BUILT_IN_MEMCPY_CHK
:
11047 case BUILT_IN_MEMPCPY_CHK
:
11048 case BUILT_IN_MEMMOVE_CHK
:
11049 case BUILT_IN_MEMSET_CHK
:
11050 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
11052 DECL_FUNCTION_CODE (fndecl
));
11054 case BUILT_IN_STRNCPY_CHK
:
11055 case BUILT_IN_STPNCPY_CHK
:
11056 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
11059 case BUILT_IN_STRNCAT_CHK
:
11060 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
11062 case BUILT_IN_SNPRINTF
:
11063 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
11065 case BUILT_IN_FPRINTF_CHK
:
11066 case BUILT_IN_VFPRINTF_CHK
:
11067 if (!validate_arg (arg1
, INTEGER_TYPE
)
11068 || TREE_SIDE_EFFECTS (arg1
))
11071 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
11081 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11082 arguments, where NARGS <= 4. IGNORE is true if the result of the
11083 function call is ignored. This function returns NULL_TREE if no
11084 simplification was possible. Note that this only folds builtins with
11085 fixed argument patterns. Foldings that do varargs-to-varargs
11086 transformations, or that match calls with more than 4 arguments,
11087 need to be handled with fold_builtin_varargs instead. */
11089 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11092 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
11094 tree ret
= NULL_TREE
;
11099 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
11102 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
11105 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
11108 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
11111 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
11119 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11120 SET_EXPR_LOCATION (ret
, loc
);
11121 TREE_NO_WARNING (ret
) = 1;
11127 /* Builtins with folding operations that operate on "..." arguments
11128 need special handling; we need to store the arguments in a convenient
11129 data structure before attempting any folding. Fortunately there are
11130 only a few builtins that fall into this category. FNDECL is the
11131 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11132 result of the function call is ignored. */
11135 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
11136 bool ignore ATTRIBUTE_UNUSED
)
11138 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11139 tree ret
= NULL_TREE
;
11143 case BUILT_IN_SPRINTF_CHK
:
11144 case BUILT_IN_VSPRINTF_CHK
:
11145 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
11148 case BUILT_IN_SNPRINTF_CHK
:
11149 case BUILT_IN_VSNPRINTF_CHK
:
11150 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
11153 case BUILT_IN_FPCLASSIFY
:
11154 ret
= fold_builtin_fpclassify (loc
, exp
);
11162 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11163 SET_EXPR_LOCATION (ret
, loc
);
11164 TREE_NO_WARNING (ret
) = 1;
11170 /* Return true if FNDECL shouldn't be folded right now.
11171 If a built-in function has an inline attribute always_inline
11172 wrapper, defer folding it after always_inline functions have
11173 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11174 might not be performed. */
11177 avoid_folding_inline_builtin (tree fndecl
)
11179 return (DECL_DECLARED_INLINE_P (fndecl
)
11180 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11182 && !cfun
->always_inline_functions_inlined
11183 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11186 /* A wrapper function for builtin folding that prevents warnings for
11187 "statement without effect" and the like, caused by removing the
11188 call node earlier than the warning is generated. */
11191 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11193 tree ret
= NULL_TREE
;
11194 tree fndecl
= get_callee_fndecl (exp
);
11196 && TREE_CODE (fndecl
) == FUNCTION_DECL
11197 && DECL_BUILT_IN (fndecl
)
11198 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11199 yet. Defer folding until we see all the arguments
11200 (after inlining). */
11201 && !CALL_EXPR_VA_ARG_PACK (exp
))
11203 int nargs
= call_expr_nargs (exp
);
11205 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11206 instead last argument is __builtin_va_arg_pack (). Defer folding
11207 even in that case, until arguments are finalized. */
11208 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11210 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11212 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11213 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11214 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11218 if (avoid_folding_inline_builtin (fndecl
))
11221 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11222 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11223 CALL_EXPR_ARGP (exp
), ignore
);
11226 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11228 tree
*args
= CALL_EXPR_ARGP (exp
);
11229 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11232 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11240 /* Conveniently construct a function call expression. FNDECL names the
11241 function to be called and N arguments are passed in the array
11245 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11247 tree fntype
= TREE_TYPE (fndecl
);
11248 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11250 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11253 /* Conveniently construct a function call expression. FNDECL names the
11254 function to be called and the arguments are passed in the vector
11258 build_call_expr_loc_vec (location_t loc
, tree fndecl
, VEC(tree
,gc
) *vec
)
11260 return build_call_expr_loc_array (loc
, fndecl
, VEC_length (tree
, vec
),
11261 VEC_address (tree
, vec
));
11265 /* Conveniently construct a function call expression. FNDECL names the
11266 function to be called, N is the number of arguments, and the "..."
11267 parameters are the argument expressions. */
11270 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11273 tree
*argarray
= XALLOCAVEC (tree
, n
);
11277 for (i
= 0; i
< n
; i
++)
11278 argarray
[i
] = va_arg (ap
, tree
);
11280 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11283 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11284 varargs macros aren't supported by all bootstrap compilers. */
11287 build_call_expr (tree fndecl
, int n
, ...)
11290 tree
*argarray
= XALLOCAVEC (tree
, n
);
11294 for (i
= 0; i
< n
; i
++)
11295 argarray
[i
] = va_arg (ap
, tree
);
11297 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11300 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11301 N arguments are passed in the array ARGARRAY. */
11304 fold_builtin_call_array (location_t loc
, tree type
,
11309 tree ret
= NULL_TREE
;
11312 if (TREE_CODE (fn
) == ADDR_EXPR
)
11314 tree fndecl
= TREE_OPERAND (fn
, 0);
11315 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11316 && DECL_BUILT_IN (fndecl
))
11318 /* If last argument is __builtin_va_arg_pack (), arguments to this
11319 function are not finalized yet. Defer folding until they are. */
11320 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11322 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11324 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11325 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11326 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11327 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11329 if (avoid_folding_inline_builtin (fndecl
))
11330 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11331 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11333 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11337 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11339 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11341 /* First try the transformations that don't require consing up
11343 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11348 /* If we got this far, we need to build an exp. */
11349 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11350 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11351 return ret
? ret
: exp
;
11355 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11358 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11359 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11360 of arguments in ARGS to be omitted. OLDNARGS is the number of
11361 elements in ARGS. */
11364 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11365 int skip
, tree fndecl
, int n
, va_list newargs
)
11367 int nargs
= oldnargs
- skip
+ n
;
11374 buffer
= XALLOCAVEC (tree
, nargs
);
11375 for (i
= 0; i
< n
; i
++)
11376 buffer
[i
] = va_arg (newargs
, tree
);
11377 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11378 buffer
[i
] = args
[j
];
11381 buffer
= args
+ skip
;
11383 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11386 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11387 list ARGS along with N new arguments specified as the "..."
11388 parameters. SKIP is the number of arguments in ARGS to be omitted.
11389 OLDNARGS is the number of elements in ARGS. */
11392 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11393 int skip
, tree fndecl
, int n
, ...)
11399 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11405 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11406 along with N new arguments specified as the "..." parameters. SKIP
11407 is the number of arguments in EXP to be omitted. This function is used
11408 to do varargs-to-varargs transformations. */
11411 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11417 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11418 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11424 /* Validate a single argument ARG against a tree code CODE representing
11428 validate_arg (const_tree arg
, enum tree_code code
)
11432 else if (code
== POINTER_TYPE
)
11433 return POINTER_TYPE_P (TREE_TYPE (arg
));
11434 else if (code
== INTEGER_TYPE
)
11435 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11436 return code
== TREE_CODE (TREE_TYPE (arg
));
11439 /* This function validates the types of a function call argument list
11440 against a specified list of tree_codes. If the last specifier is a 0,
11441 that represents an ellipses, otherwise the last specifier must be a
11444 This is the GIMPLE version of validate_arglist. Eventually we want to
11445 completely convert builtins.c to work from GIMPLEs and the tree based
11446 validate_arglist will then be removed. */
11449 validate_gimple_arglist (const_gimple call
, ...)
11451 enum tree_code code
;
11457 va_start (ap
, call
);
11462 code
= (enum tree_code
) va_arg (ap
, int);
11466 /* This signifies an ellipses, any further arguments are all ok. */
11470 /* This signifies an endlink, if no arguments remain, return
11471 true, otherwise return false. */
11472 res
= (i
== gimple_call_num_args (call
));
11475 /* If no parameters remain or the parameter's code does not
11476 match the specified code, return false. Otherwise continue
11477 checking any remaining arguments. */
11478 arg
= gimple_call_arg (call
, i
++);
11479 if (!validate_arg (arg
, code
))
11486 /* We need gotos here since we can only have one VA_CLOSE in a
11494 /* This function validates the types of a function call argument list
11495 against a specified list of tree_codes. If the last specifier is a 0,
11496 that represents an ellipses, otherwise the last specifier must be a
11500 validate_arglist (const_tree callexpr
, ...)
11502 enum tree_code code
;
11505 const_call_expr_arg_iterator iter
;
11508 va_start (ap
, callexpr
);
11509 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11513 code
= (enum tree_code
) va_arg (ap
, int);
11517 /* This signifies an ellipses, any further arguments are all ok. */
11521 /* This signifies an endlink, if no arguments remain, return
11522 true, otherwise return false. */
11523 res
= !more_const_call_expr_args_p (&iter
);
11526 /* If no parameters remain or the parameter's code does not
11527 match the specified code, return false. Otherwise continue
11528 checking any remaining arguments. */
11529 arg
= next_const_call_expr_arg (&iter
);
11530 if (!validate_arg (arg
, code
))
11537 /* We need gotos here since we can only have one VA_CLOSE in a
11545 /* Default target-specific builtin expander that does nothing. */
11548 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11549 rtx target ATTRIBUTE_UNUSED
,
11550 rtx subtarget ATTRIBUTE_UNUSED
,
11551 enum machine_mode mode ATTRIBUTE_UNUSED
,
11552 int ignore ATTRIBUTE_UNUSED
)
11557 /* Returns true is EXP represents data that would potentially reside
11558 in a readonly section. */
11561 readonly_data_expr (tree exp
)
11565 if (TREE_CODE (exp
) != ADDR_EXPR
)
11568 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11572 /* Make sure we call decl_readonly_section only for trees it
11573 can handle (since it returns true for everything it doesn't
11575 if (TREE_CODE (exp
) == STRING_CST
11576 || TREE_CODE (exp
) == CONSTRUCTOR
11577 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11578 return decl_readonly_section (exp
, 0);
11583 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11584 to the call, and TYPE is its return type.
11586 Return NULL_TREE if no simplification was possible, otherwise return the
11587 simplified form of the call as a tree.
11589 The simplified form may be a constant or other expression which
11590 computes the same value, but in a more efficient manner (including
11591 calls to other builtin functions).
11593 The call may contain arguments which need to be evaluated, but
11594 which are not useful to determine the result of the call. In
11595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11596 COMPOUND_EXPR will be an argument which must be evaluated.
11597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11598 COMPOUND_EXPR in the chain will contain the tree for the simplified
11599 form of the builtin function call. */
11602 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11604 if (!validate_arg (s1
, POINTER_TYPE
)
11605 || !validate_arg (s2
, POINTER_TYPE
))
11610 const char *p1
, *p2
;
11612 p2
= c_getstr (s2
);
11616 p1
= c_getstr (s1
);
11619 const char *r
= strstr (p1
, p2
);
11623 return build_int_cst (TREE_TYPE (s1
), 0);
11625 /* Return an offset into the constant string argument. */
11626 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11627 return fold_convert_loc (loc
, type
, tem
);
11630 /* The argument is const char *, and the result is char *, so we need
11631 a type conversion here to avoid a warning. */
11633 return fold_convert_loc (loc
, type
, s1
);
11638 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11642 /* New argument list transforming strstr(s1, s2) to
11643 strchr(s1, s2[0]). */
11644 return build_call_expr_loc (loc
, fn
, 2, s1
,
11645 build_int_cst (integer_type_node
, p2
[0]));
11649 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11650 the call, and TYPE is its return type.
11652 Return NULL_TREE if no simplification was possible, otherwise return the
11653 simplified form of the call as a tree.
11655 The simplified form may be a constant or other expression which
11656 computes the same value, but in a more efficient manner (including
11657 calls to other builtin functions).
11659 The call may contain arguments which need to be evaluated, but
11660 which are not useful to determine the result of the call. In
11661 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11662 COMPOUND_EXPR will be an argument which must be evaluated.
11663 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11664 COMPOUND_EXPR in the chain will contain the tree for the simplified
11665 form of the builtin function call. */
11668 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11670 if (!validate_arg (s1
, POINTER_TYPE
)
11671 || !validate_arg (s2
, INTEGER_TYPE
))
11677 if (TREE_CODE (s2
) != INTEGER_CST
)
11680 p1
= c_getstr (s1
);
11687 if (target_char_cast (s2
, &c
))
11690 r
= strchr (p1
, c
);
11693 return build_int_cst (TREE_TYPE (s1
), 0);
11695 /* Return an offset into the constant string argument. */
11696 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11697 return fold_convert_loc (loc
, type
, tem
);
11703 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11704 the call, and TYPE is its return type.
11706 Return NULL_TREE if no simplification was possible, otherwise return the
11707 simplified form of the call as a tree.
11709 The simplified form may be a constant or other expression which
11710 computes the same value, but in a more efficient manner (including
11711 calls to other builtin functions).
11713 The call may contain arguments which need to be evaluated, but
11714 which are not useful to determine the result of the call. In
11715 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11716 COMPOUND_EXPR will be an argument which must be evaluated.
11717 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11718 COMPOUND_EXPR in the chain will contain the tree for the simplified
11719 form of the builtin function call. */
11722 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11724 if (!validate_arg (s1
, POINTER_TYPE
)
11725 || !validate_arg (s2
, INTEGER_TYPE
))
11732 if (TREE_CODE (s2
) != INTEGER_CST
)
11735 p1
= c_getstr (s1
);
11742 if (target_char_cast (s2
, &c
))
11745 r
= strrchr (p1
, c
);
11748 return build_int_cst (TREE_TYPE (s1
), 0);
11750 /* Return an offset into the constant string argument. */
11751 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11752 return fold_convert_loc (loc
, type
, tem
);
11755 if (! integer_zerop (s2
))
11758 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11762 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11763 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11767 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11768 to the call, and TYPE is its return type.
11770 Return NULL_TREE if no simplification was possible, otherwise return the
11771 simplified form of the call as a tree.
11773 The simplified form may be a constant or other expression which
11774 computes the same value, but in a more efficient manner (including
11775 calls to other builtin functions).
11777 The call may contain arguments which need to be evaluated, but
11778 which are not useful to determine the result of the call. In
11779 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11780 COMPOUND_EXPR will be an argument which must be evaluated.
11781 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11782 COMPOUND_EXPR in the chain will contain the tree for the simplified
11783 form of the builtin function call. */
11786 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11788 if (!validate_arg (s1
, POINTER_TYPE
)
11789 || !validate_arg (s2
, POINTER_TYPE
))
11794 const char *p1
, *p2
;
11796 p2
= c_getstr (s2
);
11800 p1
= c_getstr (s1
);
11803 const char *r
= strpbrk (p1
, p2
);
11807 return build_int_cst (TREE_TYPE (s1
), 0);
11809 /* Return an offset into the constant string argument. */
11810 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11811 return fold_convert_loc (loc
, type
, tem
);
11815 /* strpbrk(x, "") == NULL.
11816 Evaluate and ignore s1 in case it had side-effects. */
11817 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11820 return NULL_TREE
; /* Really call strpbrk. */
11822 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11826 /* New argument list transforming strpbrk(s1, s2) to
11827 strchr(s1, s2[0]). */
11828 return build_call_expr_loc (loc
, fn
, 2, s1
,
11829 build_int_cst (integer_type_node
, p2
[0]));
11833 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11836 Return NULL_TREE if no simplification was possible, otherwise return the
11837 simplified form of the call as a tree.
11839 The simplified form may be a constant or other expression which
11840 computes the same value, but in a more efficient manner (including
11841 calls to other builtin functions).
11843 The call may contain arguments which need to be evaluated, but
11844 which are not useful to determine the result of the call. In
11845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11846 COMPOUND_EXPR will be an argument which must be evaluated.
11847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11848 COMPOUND_EXPR in the chain will contain the tree for the simplified
11849 form of the builtin function call. */
11852 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11854 if (!validate_arg (dst
, POINTER_TYPE
)
11855 || !validate_arg (src
, POINTER_TYPE
))
11859 const char *p
= c_getstr (src
);
11861 /* If the string length is zero, return the dst parameter. */
11862 if (p
&& *p
== '\0')
11865 if (optimize_insn_for_speed_p ())
11867 /* See if we can store by pieces into (dst + strlen(dst)). */
11869 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11870 tree strcpy_fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
11872 if (!strlen_fn
|| !strcpy_fn
)
11875 /* If we don't have a movstr we don't want to emit an strcpy
11876 call. We have to do that if the length of the source string
11877 isn't computable (in that case we can use memcpy probably
11878 later expanding to a sequence of mov instructions). If we
11879 have movstr instructions we can emit strcpy calls. */
11882 tree len
= c_strlen (src
, 1);
11883 if (! len
|| TREE_SIDE_EFFECTS (len
))
11887 /* Stabilize the argument list. */
11888 dst
= builtin_save_expr (dst
);
11890 /* Create strlen (dst). */
11891 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11892 /* Create (dst p+ strlen (dst)). */
11894 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11895 newdst
= builtin_save_expr (newdst
);
11897 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11898 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11904 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11905 arguments to the call.
11907 Return NULL_TREE if no simplification was possible, otherwise return the
11908 simplified form of the call as a tree.
11910 The simplified form may be a constant or other expression which
11911 computes the same value, but in a more efficient manner (including
11912 calls to other builtin functions).
11914 The call may contain arguments which need to be evaluated, but
11915 which are not useful to determine the result of the call. In
11916 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11917 COMPOUND_EXPR will be an argument which must be evaluated.
11918 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11919 COMPOUND_EXPR in the chain will contain the tree for the simplified
11920 form of the builtin function call. */
11923 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11925 if (!validate_arg (dst
, POINTER_TYPE
)
11926 || !validate_arg (src
, POINTER_TYPE
)
11927 || !validate_arg (len
, INTEGER_TYPE
))
11931 const char *p
= c_getstr (src
);
11933 /* If the requested length is zero, or the src parameter string
11934 length is zero, return the dst parameter. */
11935 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11936 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11938 /* If the requested len is greater than or equal to the string
11939 length, call strcat. */
11940 if (TREE_CODE (len
) == INTEGER_CST
&& p
11941 && compare_tree_int (len
, strlen (p
)) >= 0)
11943 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11945 /* If the replacement _DECL isn't initialized, don't do the
11950 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11956 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11959 Return NULL_TREE if no simplification was possible, otherwise return the
11960 simplified form of the call as a tree.
11962 The simplified form may be a constant or other expression which
11963 computes the same value, but in a more efficient manner (including
11964 calls to other builtin functions).
11966 The call may contain arguments which need to be evaluated, but
11967 which are not useful to determine the result of the call. In
11968 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11969 COMPOUND_EXPR will be an argument which must be evaluated.
11970 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11971 COMPOUND_EXPR in the chain will contain the tree for the simplified
11972 form of the builtin function call. */
11975 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11977 if (!validate_arg (s1
, POINTER_TYPE
)
11978 || !validate_arg (s2
, POINTER_TYPE
))
11982 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11984 /* If both arguments are constants, evaluate at compile-time. */
11987 const size_t r
= strspn (p1
, p2
);
11988 return size_int (r
);
11991 /* If either argument is "", return NULL_TREE. */
11992 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11993 /* Evaluate and ignore both arguments in case either one has
11995 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
12001 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12004 Return NULL_TREE if no simplification was possible, otherwise return the
12005 simplified form of the call as a tree.
12007 The simplified form may be a constant or other expression which
12008 computes the same value, but in a more efficient manner (including
12009 calls to other builtin functions).
12011 The call may contain arguments which need to be evaluated, but
12012 which are not useful to determine the result of the call. In
12013 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12014 COMPOUND_EXPR will be an argument which must be evaluated.
12015 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12016 COMPOUND_EXPR in the chain will contain the tree for the simplified
12017 form of the builtin function call. */
12020 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
12022 if (!validate_arg (s1
, POINTER_TYPE
)
12023 || !validate_arg (s2
, POINTER_TYPE
))
12027 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
12029 /* If both arguments are constants, evaluate at compile-time. */
12032 const size_t r
= strcspn (p1
, p2
);
12033 return size_int (r
);
12036 /* If the first argument is "", return NULL_TREE. */
12037 if (p1
&& *p1
== '\0')
12039 /* Evaluate and ignore argument s2 in case it has
12041 return omit_one_operand_loc (loc
, size_type_node
,
12042 size_zero_node
, s2
);
12045 /* If the second argument is "", return __builtin_strlen(s1). */
12046 if (p2
&& *p2
== '\0')
12048 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
12050 /* If the replacement _DECL isn't initialized, don't do the
12055 return build_call_expr_loc (loc
, fn
, 1, s1
);
12061 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12062 to the call. IGNORE is true if the value returned
12063 by the builtin will be ignored. UNLOCKED is true is true if this
12064 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12065 the known length of the string. Return NULL_TREE if no simplification
12069 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
12070 bool ignore
, bool unlocked
, tree len
)
12072 /* If we're using an unlocked function, assume the other unlocked
12073 functions exist explicitly. */
12074 tree
const fn_fputc
= (unlocked
12075 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
12076 : builtin_decl_implicit (BUILT_IN_FPUTC
));
12077 tree
const fn_fwrite
= (unlocked
12078 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
12079 : builtin_decl_implicit (BUILT_IN_FWRITE
));
12081 /* If the return value is used, don't do the transformation. */
12085 /* Verify the arguments in the original call. */
12086 if (!validate_arg (arg0
, POINTER_TYPE
)
12087 || !validate_arg (arg1
, POINTER_TYPE
))
12091 len
= c_strlen (arg0
, 0);
12093 /* Get the length of the string passed to fputs. If the length
12094 can't be determined, punt. */
12096 || TREE_CODE (len
) != INTEGER_CST
)
12099 switch (compare_tree_int (len
, 1))
12101 case -1: /* length is 0, delete the call entirely . */
12102 return omit_one_operand_loc (loc
, integer_type_node
,
12103 integer_zero_node
, arg1
);;
12105 case 0: /* length is 1, call fputc. */
12107 const char *p
= c_getstr (arg0
);
12112 return build_call_expr_loc (loc
, fn_fputc
, 2,
12114 (integer_type_node
, p
[0]), arg1
);
12120 case 1: /* length is greater than 1, call fwrite. */
12122 /* If optimizing for size keep fputs. */
12123 if (optimize_function_for_size_p (cfun
))
12125 /* New argument list transforming fputs(string, stream) to
12126 fwrite(string, 1, len, stream). */
12128 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
12129 size_one_node
, len
, arg1
);
12134 gcc_unreachable ();
12139 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12140 produced. False otherwise. This is done so that we don't output the error
12141 or warning twice or three times. */
12144 fold_builtin_next_arg (tree exp
, bool va_start_p
)
12146 tree fntype
= TREE_TYPE (current_function_decl
);
12147 int nargs
= call_expr_nargs (exp
);
12149 /* There is good chance the current input_location points inside the
12150 definition of the va_start macro (perhaps on the token for
12151 builtin) in a system header, so warnings will not be emitted.
12152 Use the location in real source code. */
12153 source_location current_location
=
12154 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
12157 if (!stdarg_p (fntype
))
12159 error ("%<va_start%> used in function with fixed args");
12165 if (va_start_p
&& (nargs
!= 2))
12167 error ("wrong number of arguments to function %<va_start%>");
12170 arg
= CALL_EXPR_ARG (exp
, 1);
12172 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12173 when we checked the arguments and if needed issued a warning. */
12178 /* Evidently an out of date version of <stdarg.h>; can't validate
12179 va_start's second argument, but can still work as intended. */
12180 warning_at (current_location
,
12182 "%<__builtin_next_arg%> called without an argument");
12185 else if (nargs
> 1)
12187 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12190 arg
= CALL_EXPR_ARG (exp
, 0);
12193 if (TREE_CODE (arg
) == SSA_NAME
)
12194 arg
= SSA_NAME_VAR (arg
);
12196 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12197 or __builtin_next_arg (0) the first time we see it, after checking
12198 the arguments and if needed issuing a warning. */
12199 if (!integer_zerop (arg
))
12201 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12203 /* Strip off all nops for the sake of the comparison. This
12204 is not quite the same as STRIP_NOPS. It does more.
12205 We must also strip off INDIRECT_EXPR for C++ reference
12207 while (CONVERT_EXPR_P (arg
)
12208 || TREE_CODE (arg
) == INDIRECT_REF
)
12209 arg
= TREE_OPERAND (arg
, 0);
12210 if (arg
!= last_parm
)
12212 /* FIXME: Sometimes with the tree optimizers we can get the
12213 not the last argument even though the user used the last
12214 argument. We just warn and set the arg to be the last
12215 argument so that we will get wrong-code because of
12217 warning_at (current_location
,
12219 "second parameter of %<va_start%> not last named argument");
12222 /* Undefined by C99 7.15.1.4p4 (va_start):
12223 "If the parameter parmN is declared with the register storage
12224 class, with a function or array type, or with a type that is
12225 not compatible with the type that results after application of
12226 the default argument promotions, the behavior is undefined."
12228 else if (DECL_REGISTER (arg
))
12230 warning_at (current_location
,
12232 "undefined behaviour when second parameter of "
12233 "%<va_start%> is declared with %<register%> storage");
12236 /* We want to verify the second parameter just once before the tree
12237 optimizers are run and then avoid keeping it in the tree,
12238 as otherwise we could warn even for correct code like:
12239 void foo (int i, ...)
12240 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12242 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12244 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12250 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12251 ORIG may be null if this is a 2-argument call. We don't attempt to
12252 simplify calls with more than 3 arguments.
12254 Return NULL_TREE if no simplification was possible, otherwise return the
12255 simplified form of the call as a tree. If IGNORED is true, it means that
12256 the caller does not use the returned value of the function. */
12259 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12260 tree orig
, int ignored
)
12263 const char *fmt_str
= NULL
;
12265 /* Verify the required arguments in the original call. We deal with two
12266 types of sprintf() calls: 'sprintf (str, fmt)' and
12267 'sprintf (dest, "%s", orig)'. */
12268 if (!validate_arg (dest
, POINTER_TYPE
)
12269 || !validate_arg (fmt
, POINTER_TYPE
))
12271 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12274 /* Check whether the format is a literal string constant. */
12275 fmt_str
= c_getstr (fmt
);
12276 if (fmt_str
== NULL
)
12280 retval
= NULL_TREE
;
12282 if (!init_target_chars ())
12285 /* If the format doesn't contain % args or %%, use strcpy. */
12286 if (strchr (fmt_str
, target_percent
) == NULL
)
12288 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12293 /* Don't optimize sprintf (buf, "abc", ptr++). */
12297 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12298 'format' is known to contain no % formats. */
12299 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12301 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12304 /* If the format is "%s", use strcpy if the result isn't used. */
12305 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12308 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12313 /* Don't crash on sprintf (str1, "%s"). */
12317 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12320 retval
= c_strlen (orig
, 1);
12321 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12324 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12327 if (call
&& retval
)
12329 retval
= fold_convert_loc
12330 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12332 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12338 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12339 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12340 attempt to simplify calls with more than 4 arguments.
12342 Return NULL_TREE if no simplification was possible, otherwise return the
12343 simplified form of the call as a tree. If IGNORED is true, it means that
12344 the caller does not use the returned value of the function. */
12347 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12348 tree orig
, int ignored
)
12351 const char *fmt_str
= NULL
;
12352 unsigned HOST_WIDE_INT destlen
;
12354 /* Verify the required arguments in the original call. We deal with two
12355 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12356 'snprintf (dest, cst, "%s", orig)'. */
12357 if (!validate_arg (dest
, POINTER_TYPE
)
12358 || !validate_arg (destsize
, INTEGER_TYPE
)
12359 || !validate_arg (fmt
, POINTER_TYPE
))
12361 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12364 if (!host_integerp (destsize
, 1))
12367 /* Check whether the format is a literal string constant. */
12368 fmt_str
= c_getstr (fmt
);
12369 if (fmt_str
== NULL
)
12373 retval
= NULL_TREE
;
12375 if (!init_target_chars ())
12378 destlen
= tree_low_cst (destsize
, 1);
12380 /* If the format doesn't contain % args or %%, use strcpy. */
12381 if (strchr (fmt_str
, target_percent
) == NULL
)
12383 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12384 size_t len
= strlen (fmt_str
);
12386 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12390 /* We could expand this as
12391 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12393 memcpy (str, fmt_with_nul_at_cstm1, cst);
12394 but in the former case that might increase code size
12395 and in the latter case grow .rodata section too much.
12396 So punt for now. */
12397 if (len
>= destlen
)
12403 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12404 'format' is known to contain no % formats and
12405 strlen (fmt) < cst. */
12406 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12409 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12412 /* If the format is "%s", use strcpy if the result isn't used. */
12413 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12415 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12416 unsigned HOST_WIDE_INT origlen
;
12418 /* Don't crash on snprintf (str1, cst, "%s"). */
12422 retval
= c_strlen (orig
, 1);
12423 if (!retval
|| !host_integerp (retval
, 1))
12426 origlen
= tree_low_cst (retval
, 1);
12427 /* We could expand this as
12428 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12430 memcpy (str1, str2_with_nul_at_cstm1, cst);
12431 but in the former case that might increase code size
12432 and in the latter case grow .rodata section too much.
12433 So punt for now. */
12434 if (origlen
>= destlen
)
12437 /* Convert snprintf (str1, cst, "%s", str2) into
12438 strcpy (str1, str2) if strlen (str2) < cst. */
12442 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12445 retval
= NULL_TREE
;
12448 if (call
&& retval
)
12450 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12451 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12452 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12458 /* Expand a call EXP to __builtin_object_size. */
12461 expand_builtin_object_size (tree exp
)
12464 int object_size_type
;
12465 tree fndecl
= get_callee_fndecl (exp
);
12467 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12469 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12471 expand_builtin_trap ();
12475 ost
= CALL_EXPR_ARG (exp
, 1);
12478 if (TREE_CODE (ost
) != INTEGER_CST
12479 || tree_int_cst_sgn (ost
) < 0
12480 || compare_tree_int (ost
, 3) > 0)
12482 error ("%Klast argument of %D is not integer constant between 0 and 3",
12484 expand_builtin_trap ();
12488 object_size_type
= tree_low_cst (ost
, 0);
12490 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12493 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12494 FCODE is the BUILT_IN_* to use.
12495 Return NULL_RTX if we failed; the caller should emit a normal call,
12496 otherwise try to get the result in TARGET, if convenient (and in
12497 mode MODE if that's convenient). */
12500 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12501 enum built_in_function fcode
)
12503 tree dest
, src
, len
, size
;
12505 if (!validate_arglist (exp
,
12507 fcode
== BUILT_IN_MEMSET_CHK
12508 ? INTEGER_TYPE
: POINTER_TYPE
,
12509 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12512 dest
= CALL_EXPR_ARG (exp
, 0);
12513 src
= CALL_EXPR_ARG (exp
, 1);
12514 len
= CALL_EXPR_ARG (exp
, 2);
12515 size
= CALL_EXPR_ARG (exp
, 3);
12517 if (! host_integerp (size
, 1))
12520 if (host_integerp (len
, 1) || integer_all_onesp (size
))
12524 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12526 warning_at (tree_nonartificial_location (exp
),
12527 0, "%Kcall to %D will always overflow destination buffer",
12528 exp
, get_callee_fndecl (exp
));
12533 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12534 mem{cpy,pcpy,move,set} is available. */
12537 case BUILT_IN_MEMCPY_CHK
:
12538 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12540 case BUILT_IN_MEMPCPY_CHK
:
12541 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12543 case BUILT_IN_MEMMOVE_CHK
:
12544 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12546 case BUILT_IN_MEMSET_CHK
:
12547 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12556 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12557 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12558 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12559 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12561 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12565 unsigned int dest_align
= get_pointer_alignment (dest
);
12567 /* If DEST is not a pointer type, call the normal function. */
12568 if (dest_align
== 0)
12571 /* If SRC and DEST are the same (and not volatile), do nothing. */
12572 if (operand_equal_p (src
, dest
, 0))
12576 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12578 /* Evaluate and ignore LEN in case it has side-effects. */
12579 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12580 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12583 expr
= fold_build_pointer_plus (dest
, len
);
12584 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12587 /* __memmove_chk special case. */
12588 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12590 unsigned int src_align
= get_pointer_alignment (src
);
12592 if (src_align
== 0)
12595 /* If src is categorized for a readonly section we can use
12596 normal __memcpy_chk. */
12597 if (readonly_data_expr (src
))
12599 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12602 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12603 dest
, src
, len
, size
);
12604 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12605 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12606 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12613 /* Emit warning if a buffer overflow is detected at compile time. */
12616 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12620 location_t loc
= tree_nonartificial_location (exp
);
12624 case BUILT_IN_STRCPY_CHK
:
12625 case BUILT_IN_STPCPY_CHK
:
12626 /* For __strcat_chk the warning will be emitted only if overflowing
12627 by at least strlen (dest) + 1 bytes. */
12628 case BUILT_IN_STRCAT_CHK
:
12629 len
= CALL_EXPR_ARG (exp
, 1);
12630 size
= CALL_EXPR_ARG (exp
, 2);
12633 case BUILT_IN_STRNCAT_CHK
:
12634 case BUILT_IN_STRNCPY_CHK
:
12635 case BUILT_IN_STPNCPY_CHK
:
12636 len
= CALL_EXPR_ARG (exp
, 2);
12637 size
= CALL_EXPR_ARG (exp
, 3);
12639 case BUILT_IN_SNPRINTF_CHK
:
12640 case BUILT_IN_VSNPRINTF_CHK
:
12641 len
= CALL_EXPR_ARG (exp
, 1);
12642 size
= CALL_EXPR_ARG (exp
, 3);
12645 gcc_unreachable ();
12651 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12656 len
= c_strlen (len
, 1);
12657 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12660 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12662 tree src
= CALL_EXPR_ARG (exp
, 1);
12663 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12665 src
= c_strlen (src
, 1);
12666 if (! src
|| ! host_integerp (src
, 1))
12668 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12669 exp
, get_callee_fndecl (exp
));
12672 else if (tree_int_cst_lt (src
, size
))
12675 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12678 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12679 exp
, get_callee_fndecl (exp
));
12682 /* Emit warning if a buffer overflow is detected at compile time
12683 in __sprintf_chk/__vsprintf_chk calls. */
12686 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12688 tree size
, len
, fmt
;
12689 const char *fmt_str
;
12690 int nargs
= call_expr_nargs (exp
);
12692 /* Verify the required arguments in the original call. */
12696 size
= CALL_EXPR_ARG (exp
, 2);
12697 fmt
= CALL_EXPR_ARG (exp
, 3);
12699 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12702 /* Check whether the format is a literal string constant. */
12703 fmt_str
= c_getstr (fmt
);
12704 if (fmt_str
== NULL
)
12707 if (!init_target_chars ())
12710 /* If the format doesn't contain % args or %%, we know its size. */
12711 if (strchr (fmt_str
, target_percent
) == 0)
12712 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12713 /* If the format is "%s" and first ... argument is a string literal,
12715 else if (fcode
== BUILT_IN_SPRINTF_CHK
12716 && strcmp (fmt_str
, target_percent_s
) == 0)
12722 arg
= CALL_EXPR_ARG (exp
, 4);
12723 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12726 len
= c_strlen (arg
, 1);
12727 if (!len
|| ! host_integerp (len
, 1))
12733 if (! tree_int_cst_lt (len
, size
))
12734 warning_at (tree_nonartificial_location (exp
),
12735 0, "%Kcall to %D will always overflow destination buffer",
12736 exp
, get_callee_fndecl (exp
));
12739 /* Emit warning if a free is called with address of a variable. */
12742 maybe_emit_free_warning (tree exp
)
12744 tree arg
= CALL_EXPR_ARG (exp
, 0);
12747 if (TREE_CODE (arg
) != ADDR_EXPR
)
12750 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12751 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12754 if (SSA_VAR_P (arg
))
12755 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12756 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12758 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12759 "%Kattempt to free a non-heap object", exp
);
12762 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12766 fold_builtin_object_size (tree ptr
, tree ost
)
12768 unsigned HOST_WIDE_INT bytes
;
12769 int object_size_type
;
12771 if (!validate_arg (ptr
, POINTER_TYPE
)
12772 || !validate_arg (ost
, INTEGER_TYPE
))
12777 if (TREE_CODE (ost
) != INTEGER_CST
12778 || tree_int_cst_sgn (ost
) < 0
12779 || compare_tree_int (ost
, 3) > 0)
12782 object_size_type
= tree_low_cst (ost
, 0);
12784 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12785 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12786 and (size_t) 0 for types 2 and 3. */
12787 if (TREE_SIDE_EFFECTS (ptr
))
12788 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12790 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12792 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12793 if (double_int_fits_to_tree_p (size_type_node
,
12794 uhwi_to_double_int (bytes
)))
12795 return build_int_cstu (size_type_node
, bytes
);
12797 else if (TREE_CODE (ptr
) == SSA_NAME
)
12799 /* If object size is not known yet, delay folding until
12800 later. Maybe subsequent passes will help determining
12802 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12803 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12804 && double_int_fits_to_tree_p (size_type_node
,
12805 uhwi_to_double_int (bytes
)))
12806 return build_int_cstu (size_type_node
, bytes
);
12812 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12813 DEST, SRC, LEN, and SIZE are the arguments to the call.
12814 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12815 code of the builtin. If MAXLEN is not NULL, it is maximum length
12816 passed as third argument. */
12819 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12820 tree dest
, tree src
, tree len
, tree size
,
12821 tree maxlen
, bool ignore
,
12822 enum built_in_function fcode
)
12826 if (!validate_arg (dest
, POINTER_TYPE
)
12827 || !validate_arg (src
,
12828 (fcode
== BUILT_IN_MEMSET_CHK
12829 ? INTEGER_TYPE
: POINTER_TYPE
))
12830 || !validate_arg (len
, INTEGER_TYPE
)
12831 || !validate_arg (size
, INTEGER_TYPE
))
12834 /* If SRC and DEST are the same (and not volatile), return DEST
12835 (resp. DEST+LEN for __mempcpy_chk). */
12836 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12838 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12839 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12843 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12844 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12848 if (! host_integerp (size
, 1))
12851 if (! integer_all_onesp (size
))
12853 if (! host_integerp (len
, 1))
12855 /* If LEN is not constant, try MAXLEN too.
12856 For MAXLEN only allow optimizing into non-_ocs function
12857 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12858 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12860 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12862 /* (void) __mempcpy_chk () can be optimized into
12863 (void) __memcpy_chk (). */
12864 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12868 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12876 if (tree_int_cst_lt (size
, maxlen
))
12881 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12882 mem{cpy,pcpy,move,set} is available. */
12885 case BUILT_IN_MEMCPY_CHK
:
12886 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12888 case BUILT_IN_MEMPCPY_CHK
:
12889 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12891 case BUILT_IN_MEMMOVE_CHK
:
12892 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12894 case BUILT_IN_MEMSET_CHK
:
12895 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12904 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12907 /* Fold a call to the __st[rp]cpy_chk builtin.
12908 DEST, SRC, and SIZE are the arguments to the call.
12909 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12910 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12911 strings passed as second argument. */
12914 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12915 tree src
, tree size
,
12916 tree maxlen
, bool ignore
,
12917 enum built_in_function fcode
)
12921 if (!validate_arg (dest
, POINTER_TYPE
)
12922 || !validate_arg (src
, POINTER_TYPE
)
12923 || !validate_arg (size
, INTEGER_TYPE
))
12926 /* If SRC and DEST are the same (and not volatile), return DEST. */
12927 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12928 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12930 if (! host_integerp (size
, 1))
12933 if (! integer_all_onesp (size
))
12935 len
= c_strlen (src
, 1);
12936 if (! len
|| ! host_integerp (len
, 1))
12938 /* If LEN is not constant, try MAXLEN too.
12939 For MAXLEN only allow optimizing into non-_ocs function
12940 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12941 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12943 if (fcode
== BUILT_IN_STPCPY_CHK
)
12948 /* If return value of __stpcpy_chk is ignored,
12949 optimize into __strcpy_chk. */
12950 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12954 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12957 if (! len
|| TREE_SIDE_EFFECTS (len
))
12960 /* If c_strlen returned something, but not a constant,
12961 transform __strcpy_chk into __memcpy_chk. */
12962 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12966 len
= fold_convert_loc (loc
, size_type_node
, len
);
12967 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12968 build_int_cst (size_type_node
, 1));
12969 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12970 build_call_expr_loc (loc
, fn
, 4,
12971 dest
, src
, len
, size
));
12977 if (! tree_int_cst_lt (maxlen
, size
))
12981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12982 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12983 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12987 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12990 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12991 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12992 length passed as third argument. IGNORE is true if return value can be
12993 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12996 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12997 tree len
, tree size
, tree maxlen
, bool ignore
,
12998 enum built_in_function fcode
)
13002 if (!validate_arg (dest
, POINTER_TYPE
)
13003 || !validate_arg (src
, POINTER_TYPE
)
13004 || !validate_arg (len
, INTEGER_TYPE
)
13005 || !validate_arg (size
, INTEGER_TYPE
))
13008 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
13010 /* If return value of __stpncpy_chk is ignored,
13011 optimize into __strncpy_chk. */
13012 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
13014 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
13017 if (! host_integerp (size
, 1))
13020 if (! integer_all_onesp (size
))
13022 if (! host_integerp (len
, 1))
13024 /* If LEN is not constant, try MAXLEN too.
13025 For MAXLEN only allow optimizing into non-_ocs function
13026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13027 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13033 if (tree_int_cst_lt (size
, maxlen
))
13037 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13038 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
13039 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
13043 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13046 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13047 are the arguments to the call. */
13050 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
13051 tree src
, tree size
)
13056 if (!validate_arg (dest
, POINTER_TYPE
)
13057 || !validate_arg (src
, POINTER_TYPE
)
13058 || !validate_arg (size
, INTEGER_TYPE
))
13061 p
= c_getstr (src
);
13062 /* If the SRC parameter is "", return DEST. */
13063 if (p
&& *p
== '\0')
13064 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13066 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
13069 /* If __builtin_strcat_chk is used, assume strcat is available. */
13070 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
13074 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
13077 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13081 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
13082 tree dest
, tree src
, tree len
, tree size
)
13087 if (!validate_arg (dest
, POINTER_TYPE
)
13088 || !validate_arg (src
, POINTER_TYPE
)
13089 || !validate_arg (size
, INTEGER_TYPE
)
13090 || !validate_arg (size
, INTEGER_TYPE
))
13093 p
= c_getstr (src
);
13094 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13095 if (p
&& *p
== '\0')
13096 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
13097 else if (integer_zerop (len
))
13098 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13100 if (! host_integerp (size
, 1))
13103 if (! integer_all_onesp (size
))
13105 tree src_len
= c_strlen (src
, 1);
13107 && host_integerp (src_len
, 1)
13108 && host_integerp (len
, 1)
13109 && ! tree_int_cst_lt (len
, src_len
))
13111 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13112 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
13116 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
13121 /* If __builtin_strncat_chk is used, assume strncat is available. */
13122 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
13126 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13129 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13130 Return NULL_TREE if a normal call should be emitted rather than
13131 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13132 or BUILT_IN_VSPRINTF_CHK. */
13135 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13136 enum built_in_function fcode
)
13138 tree dest
, size
, len
, fn
, fmt
, flag
;
13139 const char *fmt_str
;
13141 /* Verify the required arguments in the original call. */
13145 if (!validate_arg (dest
, POINTER_TYPE
))
13148 if (!validate_arg (flag
, INTEGER_TYPE
))
13151 if (!validate_arg (size
, INTEGER_TYPE
))
13154 if (!validate_arg (fmt
, POINTER_TYPE
))
13157 if (! host_integerp (size
, 1))
13162 if (!init_target_chars ())
13165 /* Check whether the format is a literal string constant. */
13166 fmt_str
= c_getstr (fmt
);
13167 if (fmt_str
!= NULL
)
13169 /* If the format doesn't contain % args or %%, we know the size. */
13170 if (strchr (fmt_str
, target_percent
) == 0)
13172 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13173 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13175 /* If the format is "%s" and first ... argument is a string literal,
13176 we know the size too. */
13177 else if (fcode
== BUILT_IN_SPRINTF_CHK
13178 && strcmp (fmt_str
, target_percent_s
) == 0)
13185 if (validate_arg (arg
, POINTER_TYPE
))
13187 len
= c_strlen (arg
, 1);
13188 if (! len
|| ! host_integerp (len
, 1))
13195 if (! integer_all_onesp (size
))
13197 if (! len
|| ! tree_int_cst_lt (len
, size
))
13201 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13202 or if format doesn't contain % chars or is "%s". */
13203 if (! integer_zerop (flag
))
13205 if (fmt_str
== NULL
)
13207 if (strchr (fmt_str
, target_percent
) != NULL
13208 && strcmp (fmt_str
, target_percent_s
))
13212 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13213 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13214 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13218 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13221 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13222 a normal call should be emitted rather than expanding the function
13223 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13226 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13227 enum built_in_function fcode
)
13229 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13230 CALL_EXPR_ARGP (exp
), fcode
);
13233 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13234 NULL_TREE if a normal call should be emitted rather than expanding
13235 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13236 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13237 passed as second argument. */
13240 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13241 tree maxlen
, enum built_in_function fcode
)
13243 tree dest
, size
, len
, fn
, fmt
, flag
;
13244 const char *fmt_str
;
13246 /* Verify the required arguments in the original call. */
13250 if (!validate_arg (dest
, POINTER_TYPE
))
13253 if (!validate_arg (len
, INTEGER_TYPE
))
13256 if (!validate_arg (flag
, INTEGER_TYPE
))
13259 if (!validate_arg (size
, INTEGER_TYPE
))
13262 if (!validate_arg (fmt
, POINTER_TYPE
))
13265 if (! host_integerp (size
, 1))
13268 if (! integer_all_onesp (size
))
13270 if (! host_integerp (len
, 1))
13272 /* If LEN is not constant, try MAXLEN too.
13273 For MAXLEN only allow optimizing into non-_ocs function
13274 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13275 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13281 if (tree_int_cst_lt (size
, maxlen
))
13285 if (!init_target_chars ())
13288 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13289 or if format doesn't contain % chars or is "%s". */
13290 if (! integer_zerop (flag
))
13292 fmt_str
= c_getstr (fmt
);
13293 if (fmt_str
== NULL
)
13295 if (strchr (fmt_str
, target_percent
) != NULL
13296 && strcmp (fmt_str
, target_percent_s
))
13300 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13302 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13303 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13307 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13310 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13311 a normal call should be emitted rather than expanding the function
13312 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13313 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13314 passed as second argument. */
13317 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13318 enum built_in_function fcode
)
13320 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13321 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13324 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13325 FMT and ARG are the arguments to the call; we don't fold cases with
13326 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13328 Return NULL_TREE if no simplification was possible, otherwise return the
13329 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13330 code of the function to be simplified. */
13333 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13334 tree arg
, bool ignore
,
13335 enum built_in_function fcode
)
13337 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13338 const char *fmt_str
= NULL
;
13340 /* If the return value is used, don't do the transformation. */
13344 /* Verify the required arguments in the original call. */
13345 if (!validate_arg (fmt
, POINTER_TYPE
))
13348 /* Check whether the format is a literal string constant. */
13349 fmt_str
= c_getstr (fmt
);
13350 if (fmt_str
== NULL
)
13353 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13355 /* If we're using an unlocked function, assume the other
13356 unlocked functions exist explicitly. */
13357 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13358 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13362 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13363 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13366 if (!init_target_chars ())
13369 if (strcmp (fmt_str
, target_percent_s
) == 0
13370 || strchr (fmt_str
, target_percent
) == NULL
)
13374 if (strcmp (fmt_str
, target_percent_s
) == 0)
13376 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13379 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13382 str
= c_getstr (arg
);
13388 /* The format specifier doesn't contain any '%' characters. */
13389 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13395 /* If the string was "", printf does nothing. */
13396 if (str
[0] == '\0')
13397 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13399 /* If the string has length of 1, call putchar. */
13400 if (str
[1] == '\0')
13402 /* Given printf("c"), (where c is any one character,)
13403 convert "c"[0] to an int and pass that to the replacement
13405 newarg
= build_int_cst (integer_type_node
, str
[0]);
13407 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13411 /* If the string was "string\n", call puts("string"). */
13412 size_t len
= strlen (str
);
13413 if ((unsigned char)str
[len
- 1] == target_newline
13414 && (size_t) (int) len
== len
13418 tree offset_node
, string_cst
;
13420 /* Create a NUL-terminated string that's one char shorter
13421 than the original, stripping off the trailing '\n'. */
13422 newarg
= build_string_literal (len
, str
);
13423 string_cst
= string_constant (newarg
, &offset_node
);
13424 gcc_checking_assert (string_cst
13425 && (TREE_STRING_LENGTH (string_cst
)
13427 && integer_zerop (offset_node
)
13429 TREE_STRING_POINTER (string_cst
)[len
- 1]
13430 == target_newline
);
13431 /* build_string_literal creates a new STRING_CST,
13432 modify it in place to avoid double copying. */
13433 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13434 newstr
[len
- 1] = '\0';
13436 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13439 /* We'd like to arrange to call fputs(string,stdout) here,
13440 but we need stdout and don't have a way to get it yet. */
13445 /* The other optimizations can be done only on the non-va_list variants. */
13446 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13449 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13450 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13452 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13455 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13458 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13459 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13461 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13464 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13470 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13473 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13474 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13475 more than 3 arguments, and ARG may be null in the 2-argument case.
13477 Return NULL_TREE if no simplification was possible, otherwise return the
13478 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13479 code of the function to be simplified. */
13482 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13483 tree fmt
, tree arg
, bool ignore
,
13484 enum built_in_function fcode
)
13486 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13487 const char *fmt_str
= NULL
;
13489 /* If the return value is used, don't do the transformation. */
13493 /* Verify the required arguments in the original call. */
13494 if (!validate_arg (fp
, POINTER_TYPE
))
13496 if (!validate_arg (fmt
, POINTER_TYPE
))
13499 /* Check whether the format is a literal string constant. */
13500 fmt_str
= c_getstr (fmt
);
13501 if (fmt_str
== NULL
)
13504 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13506 /* If we're using an unlocked function, assume the other
13507 unlocked functions exist explicitly. */
13508 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13509 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13513 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13514 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13517 if (!init_target_chars ())
13520 /* If the format doesn't contain % args or %%, use strcpy. */
13521 if (strchr (fmt_str
, target_percent
) == NULL
)
13523 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13527 /* If the format specifier was "", fprintf does nothing. */
13528 if (fmt_str
[0] == '\0')
13530 /* If FP has side-effects, just wait until gimplification is
13532 if (TREE_SIDE_EFFECTS (fp
))
13535 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13538 /* When "string" doesn't contain %, replace all cases of
13539 fprintf (fp, string) with fputs (string, fp). The fputs
13540 builtin will take care of special cases like length == 1. */
13542 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13545 /* The other optimizations can be done only on the non-va_list variants. */
13546 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13549 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13550 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13552 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13555 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13558 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13559 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13561 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13564 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13569 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13572 /* Initialize format string characters in the target charset. */
13575 init_target_chars (void)
13580 target_newline
= lang_hooks
.to_target_charset ('\n');
13581 target_percent
= lang_hooks
.to_target_charset ('%');
13582 target_c
= lang_hooks
.to_target_charset ('c');
13583 target_s
= lang_hooks
.to_target_charset ('s');
13584 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13588 target_percent_c
[0] = target_percent
;
13589 target_percent_c
[1] = target_c
;
13590 target_percent_c
[2] = '\0';
13592 target_percent_s
[0] = target_percent
;
13593 target_percent_s
[1] = target_s
;
13594 target_percent_s
[2] = '\0';
13596 target_percent_s_newline
[0] = target_percent
;
13597 target_percent_s_newline
[1] = target_s
;
13598 target_percent_s_newline
[2] = target_newline
;
13599 target_percent_s_newline
[3] = '\0';
13606 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13607 and no overflow/underflow occurred. INEXACT is true if M was not
13608 exactly calculated. TYPE is the tree type for the result. This
13609 function assumes that you cleared the MPFR flags and then
13610 calculated M to see if anything subsequently set a flag prior to
13611 entering this function. Return NULL_TREE if any checks fail. */
13614 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13616 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13617 overflow/underflow occurred. If -frounding-math, proceed iff the
13618 result of calling FUNC was exact. */
13619 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13620 && (!flag_rounding_math
|| !inexact
))
13622 REAL_VALUE_TYPE rr
;
13624 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13625 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13626 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13627 but the mpft_t is not, then we underflowed in the
13629 if (real_isfinite (&rr
)
13630 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13632 REAL_VALUE_TYPE rmode
;
13634 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13635 /* Proceed iff the specified mode can hold the value. */
13636 if (real_identical (&rmode
, &rr
))
13637 return build_real (type
, rmode
);
13643 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13644 number and no overflow/underflow occurred. INEXACT is true if M
13645 was not exactly calculated. TYPE is the tree type for the result.
13646 This function assumes that you cleared the MPFR flags and then
13647 calculated M to see if anything subsequently set a flag prior to
13648 entering this function. Return NULL_TREE if any checks fail, if
13649 FORCE_CONVERT is true, then bypass the checks. */
13652 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13654 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13655 overflow/underflow occurred. If -frounding-math, proceed iff the
13656 result of calling FUNC was exact. */
13658 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13659 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13660 && (!flag_rounding_math
|| !inexact
)))
13662 REAL_VALUE_TYPE re
, im
;
13664 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13665 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13666 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13667 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13668 but the mpft_t is not, then we underflowed in the
13671 || (real_isfinite (&re
) && real_isfinite (&im
)
13672 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13673 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13675 REAL_VALUE_TYPE re_mode
, im_mode
;
13677 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13678 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13679 /* Proceed iff the specified mode can hold the value. */
13681 || (real_identical (&re_mode
, &re
)
13682 && real_identical (&im_mode
, &im
)))
13683 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13684 build_real (TREE_TYPE (type
), im_mode
));
13690 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13691 FUNC on it and return the resulting value as a tree with type TYPE.
13692 If MIN and/or MAX are not NULL, then the supplied ARG must be
13693 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13694 acceptable values, otherwise they are not. The mpfr precision is
13695 set to the precision of TYPE. We assume that function FUNC returns
13696 zero if the result could be calculated exactly within the requested
13700 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13701 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13704 tree result
= NULL_TREE
;
13708 /* To proceed, MPFR must exactly represent the target floating point
13709 format, which only happens when the target base equals two. */
13710 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13711 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13713 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13715 if (real_isfinite (ra
)
13716 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13717 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13719 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13720 const int prec
= fmt
->p
;
13721 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13725 mpfr_init2 (m
, prec
);
13726 mpfr_from_real (m
, ra
, GMP_RNDN
);
13727 mpfr_clear_flags ();
13728 inexact
= func (m
, m
, rnd
);
13729 result
= do_mpfr_ckconv (m
, type
, inexact
);
13737 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13738 FUNC on it and return the resulting value as a tree with type TYPE.
13739 The mpfr precision is set to the precision of TYPE. We assume that
13740 function FUNC returns zero if the result could be calculated
13741 exactly within the requested precision. */
13744 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13745 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13747 tree result
= NULL_TREE
;
13752 /* To proceed, MPFR must exactly represent the target floating point
13753 format, which only happens when the target base equals two. */
13754 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13755 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13756 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13758 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13759 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13761 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13763 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13764 const int prec
= fmt
->p
;
13765 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13769 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13770 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13771 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13772 mpfr_clear_flags ();
13773 inexact
= func (m1
, m1
, m2
, rnd
);
13774 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13775 mpfr_clears (m1
, m2
, NULL
);
13782 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13783 FUNC on it and return the resulting value as a tree with type TYPE.
13784 The mpfr precision is set to the precision of TYPE. We assume that
13785 function FUNC returns zero if the result could be calculated
13786 exactly within the requested precision. */
13789 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13790 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13792 tree result
= NULL_TREE
;
13798 /* To proceed, MPFR must exactly represent the target floating point
13799 format, which only happens when the target base equals two. */
13800 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13801 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13802 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13803 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13805 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13806 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13807 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13809 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13811 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13812 const int prec
= fmt
->p
;
13813 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13817 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13818 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13819 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13820 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13821 mpfr_clear_flags ();
13822 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13823 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13824 mpfr_clears (m1
, m2
, m3
, NULL
);
13831 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13832 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13833 If ARG_SINP and ARG_COSP are NULL then the result is returned
13834 as a complex value.
13835 The type is taken from the type of ARG and is used for setting the
13836 precision of the calculation and results. */
13839 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13841 tree
const type
= TREE_TYPE (arg
);
13842 tree result
= NULL_TREE
;
13846 /* To proceed, MPFR must exactly represent the target floating point
13847 format, which only happens when the target base equals two. */
13848 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13849 && TREE_CODE (arg
) == REAL_CST
13850 && !TREE_OVERFLOW (arg
))
13852 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13854 if (real_isfinite (ra
))
13856 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13857 const int prec
= fmt
->p
;
13858 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13859 tree result_s
, result_c
;
13863 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13864 mpfr_from_real (m
, ra
, GMP_RNDN
);
13865 mpfr_clear_flags ();
13866 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13867 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13868 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13869 mpfr_clears (m
, ms
, mc
, NULL
);
13870 if (result_s
&& result_c
)
13872 /* If we are to return in a complex value do so. */
13873 if (!arg_sinp
&& !arg_cosp
)
13874 return build_complex (build_complex_type (type
),
13875 result_c
, result_s
);
13877 /* Dereference the sin/cos pointer arguments. */
13878 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13879 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13880 /* Proceed if valid pointer type were passed in. */
13881 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13882 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13884 /* Set the values. */
13885 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13887 TREE_SIDE_EFFECTS (result_s
) = 1;
13888 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13890 TREE_SIDE_EFFECTS (result_c
) = 1;
13891 /* Combine the assignments into a compound expr. */
13892 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13893 result_s
, result_c
));
13901 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13902 two-argument mpfr order N Bessel function FUNC on them and return
13903 the resulting value as a tree with type TYPE. The mpfr precision
13904 is set to the precision of TYPE. We assume that function FUNC
13905 returns zero if the result could be calculated exactly within the
13906 requested precision. */
13908 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13909 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13910 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13912 tree result
= NULL_TREE
;
13917 /* To proceed, MPFR must exactly represent the target floating point
13918 format, which only happens when the target base equals two. */
13919 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13920 && host_integerp (arg1
, 0)
13921 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13923 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
13924 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13927 && real_isfinite (ra
)
13928 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13930 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13931 const int prec
= fmt
->p
;
13932 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13936 mpfr_init2 (m
, prec
);
13937 mpfr_from_real (m
, ra
, GMP_RNDN
);
13938 mpfr_clear_flags ();
13939 inexact
= func (m
, n
, m
, rnd
);
13940 result
= do_mpfr_ckconv (m
, type
, inexact
);
13948 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13949 the pointer *(ARG_QUO) and return the result. The type is taken
13950 from the type of ARG0 and is used for setting the precision of the
13951 calculation and results. */
13954 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13956 tree
const type
= TREE_TYPE (arg0
);
13957 tree result
= NULL_TREE
;
13962 /* To proceed, MPFR must exactly represent the target floating point
13963 format, which only happens when the target base equals two. */
13964 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13965 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13966 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13968 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13969 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13971 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13973 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13974 const int prec
= fmt
->p
;
13975 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13980 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13981 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13982 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13983 mpfr_clear_flags ();
13984 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13985 /* Remquo is independent of the rounding mode, so pass
13986 inexact=0 to do_mpfr_ckconv(). */
13987 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13988 mpfr_clears (m0
, m1
, NULL
);
13991 /* MPFR calculates quo in the host's long so it may
13992 return more bits in quo than the target int can hold
13993 if sizeof(host long) > sizeof(target int). This can
13994 happen even for native compilers in LP64 mode. In
13995 these cases, modulo the quo value with the largest
13996 number that the target int can hold while leaving one
13997 bit for the sign. */
13998 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13999 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
14001 /* Dereference the quo pointer argument. */
14002 arg_quo
= build_fold_indirect_ref (arg_quo
);
14003 /* Proceed iff a valid pointer type was passed in. */
14004 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
14006 /* Set the value. */
14008 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
14009 build_int_cst (TREE_TYPE (arg_quo
),
14011 TREE_SIDE_EFFECTS (result_quo
) = 1;
14012 /* Combine the quo assignment with the rem. */
14013 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
14014 result_quo
, result_rem
));
14022 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14023 resulting value as a tree with type TYPE. The mpfr precision is
14024 set to the precision of TYPE. We assume that this mpfr function
14025 returns zero if the result could be calculated exactly within the
14026 requested precision. In addition, the integer pointer represented
14027 by ARG_SG will be dereferenced and set to the appropriate signgam
14031 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
14033 tree result
= NULL_TREE
;
14037 /* To proceed, MPFR must exactly represent the target floating point
14038 format, which only happens when the target base equals two. Also
14039 verify ARG is a constant and that ARG_SG is an int pointer. */
14040 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
14041 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
14042 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
14043 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
14045 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
14047 /* In addition to NaN and Inf, the argument cannot be zero or a
14048 negative integer. */
14049 if (real_isfinite (ra
)
14050 && ra
->cl
!= rvc_zero
14051 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
14053 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
14054 const int prec
= fmt
->p
;
14055 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14060 mpfr_init2 (m
, prec
);
14061 mpfr_from_real (m
, ra
, GMP_RNDN
);
14062 mpfr_clear_flags ();
14063 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
14064 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
14070 /* Dereference the arg_sg pointer argument. */
14071 arg_sg
= build_fold_indirect_ref (arg_sg
);
14072 /* Assign the signgam value into *arg_sg. */
14073 result_sg
= fold_build2 (MODIFY_EXPR
,
14074 TREE_TYPE (arg_sg
), arg_sg
,
14075 build_int_cst (TREE_TYPE (arg_sg
), sg
));
14076 TREE_SIDE_EFFECTS (result_sg
) = 1;
14077 /* Combine the signgam assignment with the lgamma result. */
14078 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
14079 result_sg
, result_lg
));
14087 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14088 function FUNC on it and return the resulting value as a tree with
14089 type TYPE. The mpfr precision is set to the precision of TYPE. We
14090 assume that function FUNC returns zero if the result could be
14091 calculated exactly within the requested precision. */
14094 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
14096 tree result
= NULL_TREE
;
14100 /* To proceed, MPFR must exactly represent the target floating point
14101 format, which only happens when the target base equals two. */
14102 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
14103 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
14104 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
14106 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
14107 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
14109 if (real_isfinite (re
) && real_isfinite (im
))
14111 const struct real_format
*const fmt
=
14112 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14113 const int prec
= fmt
->p
;
14114 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14115 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14119 mpc_init2 (m
, prec
);
14120 mpfr_from_real (mpc_realref(m
), re
, rnd
);
14121 mpfr_from_real (mpc_imagref(m
), im
, rnd
);
14122 mpfr_clear_flags ();
14123 inexact
= func (m
, m
, crnd
);
14124 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
14132 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14133 mpc function FUNC on it and return the resulting value as a tree
14134 with type TYPE. The mpfr precision is set to the precision of
14135 TYPE. We assume that function FUNC returns zero if the result
14136 could be calculated exactly within the requested precision. If
14137 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14138 in the arguments and/or results. */
14141 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
14142 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
14144 tree result
= NULL_TREE
;
14149 /* To proceed, MPFR must exactly represent the target floating point
14150 format, which only happens when the target base equals two. */
14151 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
14152 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
14153 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
14154 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
14155 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
14157 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
14158 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
14159 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
14160 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
14163 || (real_isfinite (re0
) && real_isfinite (im0
)
14164 && real_isfinite (re1
) && real_isfinite (im1
)))
14166 const struct real_format
*const fmt
=
14167 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14168 const int prec
= fmt
->p
;
14169 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14170 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14174 mpc_init2 (m0
, prec
);
14175 mpc_init2 (m1
, prec
);
14176 mpfr_from_real (mpc_realref(m0
), re0
, rnd
);
14177 mpfr_from_real (mpc_imagref(m0
), im0
, rnd
);
14178 mpfr_from_real (mpc_realref(m1
), re1
, rnd
);
14179 mpfr_from_real (mpc_imagref(m1
), im1
, rnd
);
14180 mpfr_clear_flags ();
14181 inexact
= func (m0
, m0
, m1
, crnd
);
14182 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14191 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14192 a normal call should be emitted rather than expanding the function
14193 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14196 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14198 int nargs
= gimple_call_num_args (stmt
);
14200 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14202 ? gimple_call_arg_ptr (stmt
, 0)
14203 : &error_mark_node
), fcode
);
14206 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14207 a normal call should be emitted rather than expanding the function
14208 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14209 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14210 passed as second argument. */
14213 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14214 enum built_in_function fcode
)
14216 int nargs
= gimple_call_num_args (stmt
);
14218 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14220 ? gimple_call_arg_ptr (stmt
, 0)
14221 : &error_mark_node
), maxlen
, fcode
);
14224 /* Builtins with folding operations that operate on "..." arguments
14225 need special handling; we need to store the arguments in a convenient
14226 data structure before attempting any folding. Fortunately there are
14227 only a few builtins that fall into this category. FNDECL is the
14228 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14229 result of the function call is ignored. */
14232 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14233 bool ignore ATTRIBUTE_UNUSED
)
14235 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14236 tree ret
= NULL_TREE
;
14240 case BUILT_IN_SPRINTF_CHK
:
14241 case BUILT_IN_VSPRINTF_CHK
:
14242 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14245 case BUILT_IN_SNPRINTF_CHK
:
14246 case BUILT_IN_VSNPRINTF_CHK
:
14247 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14254 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14255 TREE_NO_WARNING (ret
) = 1;
14261 /* A wrapper function for builtin folding that prevents warnings for
14262 "statement without effect" and the like, caused by removing the
14263 call node earlier than the warning is generated. */
14266 fold_call_stmt (gimple stmt
, bool ignore
)
14268 tree ret
= NULL_TREE
;
14269 tree fndecl
= gimple_call_fndecl (stmt
);
14270 location_t loc
= gimple_location (stmt
);
14272 && TREE_CODE (fndecl
) == FUNCTION_DECL
14273 && DECL_BUILT_IN (fndecl
)
14274 && !gimple_call_va_arg_pack_p (stmt
))
14276 int nargs
= gimple_call_num_args (stmt
);
14277 tree
*args
= (nargs
> 0
14278 ? gimple_call_arg_ptr (stmt
, 0)
14279 : &error_mark_node
);
14281 if (avoid_folding_inline_builtin (fndecl
))
14283 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14285 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14289 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14290 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14292 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14295 /* Propagate location information from original call to
14296 expansion of builtin. Otherwise things like
14297 maybe_emit_chk_warning, that operate on the expansion
14298 of a builtin, will use the wrong location information. */
14299 if (gimple_has_location (stmt
))
14301 tree realret
= ret
;
14302 if (TREE_CODE (ret
) == NOP_EXPR
)
14303 realret
= TREE_OPERAND (ret
, 0);
14304 if (CAN_HAVE_LOCATION_P (realret
)
14305 && !EXPR_HAS_LOCATION (realret
))
14306 SET_EXPR_LOCATION (realret
, loc
);
14316 /* Look up the function in builtin_decl that corresponds to DECL
14317 and set ASMSPEC as its user assembler name. DECL must be a
14318 function decl that declares a builtin. */
14321 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14324 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14325 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14328 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14329 set_user_assembler_name (builtin
, asmspec
);
14330 switch (DECL_FUNCTION_CODE (decl
))
14332 case BUILT_IN_MEMCPY
:
14333 init_block_move_fn (asmspec
);
14334 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14336 case BUILT_IN_MEMSET
:
14337 init_block_clear_fn (asmspec
);
14338 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14340 case BUILT_IN_MEMMOVE
:
14341 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14343 case BUILT_IN_MEMCMP
:
14344 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14346 case BUILT_IN_ABORT
:
14347 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14350 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14352 set_user_assembler_libfunc ("ffs", asmspec
);
14353 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14354 MODE_INT
, 0), "ffs");
14362 /* Return true if DECL is a builtin that expands to a constant or similarly
14365 is_simple_builtin (tree decl
)
14367 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14368 switch (DECL_FUNCTION_CODE (decl
))
14370 /* Builtins that expand to constants. */
14371 case BUILT_IN_CONSTANT_P
:
14372 case BUILT_IN_EXPECT
:
14373 case BUILT_IN_OBJECT_SIZE
:
14374 case BUILT_IN_UNREACHABLE
:
14375 /* Simple register moves or loads from stack. */
14376 case BUILT_IN_ASSUME_ALIGNED
:
14377 case BUILT_IN_RETURN_ADDRESS
:
14378 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14379 case BUILT_IN_FROB_RETURN_ADDR
:
14380 case BUILT_IN_RETURN
:
14381 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14382 case BUILT_IN_FRAME_ADDRESS
:
14383 case BUILT_IN_VA_END
:
14384 case BUILT_IN_STACK_SAVE
:
14385 case BUILT_IN_STACK_RESTORE
:
14386 /* Exception state returns or moves registers around. */
14387 case BUILT_IN_EH_FILTER
:
14388 case BUILT_IN_EH_POINTER
:
14389 case BUILT_IN_EH_COPY_VALUES
:
14399 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14400 most probably expanded inline into reasonably simple code. This is a
14401 superset of is_simple_builtin. */
14403 is_inexpensive_builtin (tree decl
)
14407 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14409 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14410 switch (DECL_FUNCTION_CODE (decl
))
14413 case BUILT_IN_ALLOCA
:
14414 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14415 case BUILT_IN_BSWAP16
:
14416 case BUILT_IN_BSWAP32
:
14417 case BUILT_IN_BSWAP64
:
14419 case BUILT_IN_CLZIMAX
:
14420 case BUILT_IN_CLZL
:
14421 case BUILT_IN_CLZLL
:
14423 case BUILT_IN_CTZIMAX
:
14424 case BUILT_IN_CTZL
:
14425 case BUILT_IN_CTZLL
:
14427 case BUILT_IN_FFSIMAX
:
14428 case BUILT_IN_FFSL
:
14429 case BUILT_IN_FFSLL
:
14430 case BUILT_IN_IMAXABS
:
14431 case BUILT_IN_FINITE
:
14432 case BUILT_IN_FINITEF
:
14433 case BUILT_IN_FINITEL
:
14434 case BUILT_IN_FINITED32
:
14435 case BUILT_IN_FINITED64
:
14436 case BUILT_IN_FINITED128
:
14437 case BUILT_IN_FPCLASSIFY
:
14438 case BUILT_IN_ISFINITE
:
14439 case BUILT_IN_ISINF_SIGN
:
14440 case BUILT_IN_ISINF
:
14441 case BUILT_IN_ISINFF
:
14442 case BUILT_IN_ISINFL
:
14443 case BUILT_IN_ISINFD32
:
14444 case BUILT_IN_ISINFD64
:
14445 case BUILT_IN_ISINFD128
:
14446 case BUILT_IN_ISNAN
:
14447 case BUILT_IN_ISNANF
:
14448 case BUILT_IN_ISNANL
:
14449 case BUILT_IN_ISNAND32
:
14450 case BUILT_IN_ISNAND64
:
14451 case BUILT_IN_ISNAND128
:
14452 case BUILT_IN_ISNORMAL
:
14453 case BUILT_IN_ISGREATER
:
14454 case BUILT_IN_ISGREATEREQUAL
:
14455 case BUILT_IN_ISLESS
:
14456 case BUILT_IN_ISLESSEQUAL
:
14457 case BUILT_IN_ISLESSGREATER
:
14458 case BUILT_IN_ISUNORDERED
:
14459 case BUILT_IN_VA_ARG_PACK
:
14460 case BUILT_IN_VA_ARG_PACK_LEN
:
14461 case BUILT_IN_VA_COPY
:
14462 case BUILT_IN_TRAP
:
14463 case BUILT_IN_SAVEREGS
:
14464 case BUILT_IN_POPCOUNTL
:
14465 case BUILT_IN_POPCOUNTLL
:
14466 case BUILT_IN_POPCOUNTIMAX
:
14467 case BUILT_IN_POPCOUNT
:
14468 case BUILT_IN_PARITYL
:
14469 case BUILT_IN_PARITYLL
:
14470 case BUILT_IN_PARITYIMAX
:
14471 case BUILT_IN_PARITY
:
14472 case BUILT_IN_LABS
:
14473 case BUILT_IN_LLABS
:
14474 case BUILT_IN_PREFETCH
:
14478 return is_simple_builtin (decl
);