/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
31 #include "tree-object-size.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
41 #include "hard-reg-set.h"
44 #include "insn-config.h"
50 #include "typeclass.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
64 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
/* Built-in-expansion state for the default target; the
   `this_target_builtins' pointer defined just below refers to this
   object initially.  */
66 struct target_builtins default_target_builtins
;
/* Points at the built-in state of the target currently being compiled
   for; starts out referring to the default target's state above.  */
68 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
71 /* Define the names of the builtin function types and codes. */
/* Printable name for each built-in class, indexed by the class value
   (BUILT_IN_LAST entries).  NOTE(review): the initializer order must
   track enum built_in_class -- confirm against the enum's declaration.  */
72 const char *const built_in_class_names
[BUILT_IN_LAST
]
73 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
75 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
76 const char * built_in_names
[(int) END_BUILTINS
] =
78 #include "builtins.def"
82 /* Setup an array of _DECL trees, make sure each element is
83 initialized to NULL_TREE. */
/* File-scope definition: static storage is zero-initialized, which is
   what provides the NULL_TREE elements.  NOTE(review): assumes
   NULL_TREE is the all-zero value -- confirm.  */
84 builtin_info_type builtin_info
;
86 /* Non-zero if __builtin_constant_p should be folded right away. */
/* Zero-initialized (false) at file scope; whoever sets it is not
   visible in this view.  */
87 bool force_folding_builtin_constant_p
;
89 static const char *c_getstr (tree
);
90 static rtx
c_readstr (const char *, enum machine_mode
);
91 static int target_char_cast (tree
, char *);
92 static rtx
get_memory_rtx (tree
, tree
);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx
result_vector (int, rtx
);
98 static void expand_builtin_update_setjmp_buf (rtx
);
99 static void expand_builtin_prefetch (tree
);
100 static rtx
expand_builtin_apply_args (void);
101 static rtx
expand_builtin_apply_args_1 (void);
102 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
103 static void expand_builtin_return (rtx
);
104 static enum type_class
type_to_class (tree
);
105 static rtx
expand_builtin_classify_type (tree
);
106 static void expand_errno_check (tree
, rtx
);
107 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
108 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
109 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
110 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
111 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
112 static rtx
expand_builtin_sincos (tree
);
113 static rtx
expand_builtin_cexpi (tree
, rtx
);
114 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
115 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
116 static rtx
expand_builtin_next_arg (void);
117 static rtx
expand_builtin_va_start (tree
);
118 static rtx
expand_builtin_va_end (tree
);
119 static rtx
expand_builtin_va_copy (tree
);
120 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
121 static rtx
expand_builtin_strcmp (tree
, rtx
);
122 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
123 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
124 static rtx
expand_builtin_memcpy (tree
, rtx
);
125 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
127 enum machine_mode
, int);
128 static rtx
expand_builtin_strcpy (tree
, rtx
);
129 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
130 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
131 static rtx
expand_builtin_strncpy (tree
, rtx
);
132 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
133 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
134 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
135 static rtx
expand_builtin_bzero (tree
);
136 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_alloca (tree
, bool);
138 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
139 static rtx
expand_builtin_frame_address (tree
, tree
);
140 static tree
stabilize_va_list_loc (location_t
, tree
, int);
141 static rtx
expand_builtin_expect (tree
, rtx
);
142 static tree
fold_builtin_constant_p (tree
);
143 static tree
fold_builtin_expect (location_t
, tree
, tree
);
144 static tree
fold_builtin_classify_type (tree
);
145 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
146 static tree
fold_builtin_inf (location_t
, tree
, int);
147 static tree
fold_builtin_nan (tree
, tree
, int);
148 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
149 static bool validate_arg (const_tree
, enum tree_code code
);
150 static bool integer_valued_real_p (tree
);
151 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
152 static bool readonly_data_expr (tree
);
153 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
154 static rtx
expand_builtin_signbit (tree
, rtx
);
155 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
156 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
157 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
158 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
159 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
160 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_tan (tree
, tree
);
162 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
163 static tree
fold_builtin_floor (location_t
, tree
, tree
);
164 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
165 static tree
fold_builtin_round (location_t
, tree
, tree
);
166 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
167 static tree
fold_builtin_bitop (tree
, tree
);
168 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
169 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
170 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
171 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
172 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
173 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
174 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
175 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
176 static tree
fold_builtin_isascii (location_t
, tree
);
177 static tree
fold_builtin_toascii (location_t
, tree
);
178 static tree
fold_builtin_isdigit (location_t
, tree
);
179 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
180 static tree
fold_builtin_abs (location_t
, tree
, tree
);
181 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
183 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
184 static tree
fold_builtin_0 (location_t
, tree
, bool);
185 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
186 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
187 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
188 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
189 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
191 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
192 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
193 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
194 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
195 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
196 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
197 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
198 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
200 static rtx
expand_builtin_object_size (tree
);
201 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
202 enum built_in_function
);
203 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
204 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
205 static void maybe_emit_free_warning (tree
);
206 static tree
fold_builtin_object_size (tree
, tree
);
207 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
208 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
209 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
210 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
211 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
212 enum built_in_function
);
213 static bool init_target_chars (void);
/* Characters and short format fragments ('\n', '%', 'c', 's', "%c",
   "%s", "%s\n") encoded in the *target* character set, for use when
   folding printf-family built-ins.  Presumably filled in lazily by
   init_target_chars (declared above) -- confirm at its definition.  */
215 static unsigned HOST_WIDE_INT target_newline
;
216 static unsigned HOST_WIDE_INT target_percent
;
217 static unsigned HOST_WIDE_INT target_c
;
218 static unsigned HOST_WIDE_INT target_s
;
219 static char target_percent_c
[3];
220 static char target_percent_s
[3];
221 static char target_percent_s_newline
[4];
222 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
223 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
224 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
225 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
226 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
227 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
228 static tree
do_mpfr_sincos (tree
, tree
, tree
);
229 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
230 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
231 const REAL_VALUE_TYPE
*, bool);
232 static tree
do_mpfr_remquo (tree
, tree
, tree
);
233 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
234 static void expand_builtin_sync_synchronize (void);
236 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_
   (or is one of the Cilk runtime's entry points).  */
239 is_builtin_name (const char *name
)
241 if (strncmp (name
, "__builtin_", 10) == 0)
243 if (strncmp (name
, "__sync_", 7) == 0)
245 if (strncmp (name
, "__atomic_", 9) == 0)
248 && (!strcmp (name
, "__cilkrts_detach")
249 || !strcmp (name
, "__cilkrts_pop_frame")))
255 /* Return true if DECL is a function symbol representing a built-in. */
258 is_builtin_fn (tree decl
)
260 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
263 /* By default we assume that c99 functions are present at the runtime,
264 but sincos is not. */
266 default_libc_has_function (enum function_class fn_class
)
268 if (fn_class
== function_c94
269 || fn_class
== function_c99_misc
270 || fn_class
== function_c99_math_complex
)
277 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
283 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
288 /* Return true if NODE should be considered for inline expansion regardless
289 of the optimization level. This means whenever a function is invoked with
290 its "internal" name, which normally contains the prefix "__builtin". */
293 called_as_built_in (tree node
)
295 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
296 we want the name used to call the function, not the name it
298 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
299 return is_builtin_name (name
);
302 /* Compute values M and N such that M divides (address of EXP - N) and such
303 that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
304 *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
305 *ALIGNP and any bit-offset to *BITPOSP.
307 Note that the address (and thus the alignment) computed here is based
308 on the address to which a symbol resolves, whereas DECL_ALIGN is based
309 on the address at which an object is actually located. These two
310 addresses are not always the same. For example, on ARM targets,
311 the address &foo of a Thumb function foo() has the lowest bit set,
312 whereas foo() itself starts on an even address.
314 If ADDR_P is true we are taking the address of the memory reference EXP
315 and thus cannot rely on the access taking place. */
318 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
319 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
321 HOST_WIDE_INT bitsize
, bitpos
;
323 enum machine_mode mode
;
324 int unsignedp
, volatilep
;
325 unsigned int align
= BITS_PER_UNIT
;
326 bool known_alignment
= false;
328 /* Get the innermost object and the constant (bitpos) and possibly
329 variable (offset) offset of the access. */
330 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
331 &mode
, &unsignedp
, &volatilep
, true);
333 /* Extract alignment information from the innermost object and
334 possibly adjust bitpos and offset. */
335 if (TREE_CODE (exp
) == FUNCTION_DECL
)
337 /* Function addresses can encode extra information besides their
338 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
339 allows the low bit to be used as a virtual bit, we know
340 that the address itself must be at least 2-byte aligned. */
341 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
342 align
= 2 * BITS_PER_UNIT
;
344 else if (TREE_CODE (exp
) == LABEL_DECL
)
346 else if (TREE_CODE (exp
) == CONST_DECL
)
348 /* The alignment of a CONST_DECL is determined by its initializer. */
349 exp
= DECL_INITIAL (exp
);
350 align
= TYPE_ALIGN (TREE_TYPE (exp
));
351 #ifdef CONSTANT_ALIGNMENT
352 if (CONSTANT_CLASS_P (exp
))
353 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
355 known_alignment
= true;
357 else if (DECL_P (exp
))
359 align
= DECL_ALIGN (exp
);
360 known_alignment
= true;
362 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
364 align
= TYPE_ALIGN (TREE_TYPE (exp
));
366 else if (TREE_CODE (exp
) == INDIRECT_REF
367 || TREE_CODE (exp
) == MEM_REF
368 || TREE_CODE (exp
) == TARGET_MEM_REF
)
370 tree addr
= TREE_OPERAND (exp
, 0);
372 unsigned HOST_WIDE_INT ptr_bitpos
;
374 if (TREE_CODE (addr
) == BIT_AND_EXPR
375 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
377 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
378 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
379 align
*= BITS_PER_UNIT
;
380 addr
= TREE_OPERAND (addr
, 0);
384 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
385 align
= MAX (ptr_align
, align
);
387 /* The alignment of the pointer operand in a TARGET_MEM_REF
388 has to take the variable offset parts into account. */
389 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
393 unsigned HOST_WIDE_INT step
= 1;
395 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
396 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
398 if (TMR_INDEX2 (exp
))
399 align
= BITS_PER_UNIT
;
400 known_alignment
= false;
403 /* When EXP is an actual memory reference then we can use
404 TYPE_ALIGN of a pointer indirection to derive alignment.
405 Do so only if get_pointer_alignment_1 did not reveal absolute
406 alignment knowledge and if using that alignment would
407 improve the situation. */
408 if (!addr_p
&& !known_alignment
409 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
410 align
= TYPE_ALIGN (TREE_TYPE (exp
));
413 /* Else adjust bitpos accordingly. */
414 bitpos
+= ptr_bitpos
;
415 if (TREE_CODE (exp
) == MEM_REF
416 || TREE_CODE (exp
) == TARGET_MEM_REF
)
417 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
420 else if (TREE_CODE (exp
) == STRING_CST
)
422 /* STRING_CST are the only constant objects we allow to be not
423 wrapped inside a CONST_DECL. */
424 align
= TYPE_ALIGN (TREE_TYPE (exp
));
425 #ifdef CONSTANT_ALIGNMENT
426 if (CONSTANT_CLASS_P (exp
))
427 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
429 known_alignment
= true;
432 /* If there is a non-constant offset part extract the maximum
433 alignment that can prevail. */
436 unsigned int trailing_zeros
= tree_ctz (offset
);
437 if (trailing_zeros
< HOST_BITS_PER_INT
)
439 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
441 align
= MIN (align
, inner
);
446 *bitposp
= bitpos
& (*alignp
- 1);
447 return known_alignment
;
450 /* For a memory reference expression EXP compute values M and N such that M
451 divides (&EXP - N) and such that N < M.  If these numbers can be determined,
452 store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
453 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
456 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
457 unsigned HOST_WIDE_INT
*bitposp
)
459 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
462 /* Return the alignment in bits of EXP, an object. */
465 get_object_alignment (tree exp
)
467 unsigned HOST_WIDE_INT bitpos
= 0;
470 get_object_alignment_1 (exp
, &align
, &bitpos
);
472 /* align and bitpos now specify known low bits of the pointer.
473 ptr & (align - 1) == bitpos. */
476 align
= (bitpos
& -bitpos
);
480 /* For a pointer valued expression EXP compute values M and N such that M
481 divides (EXP - N) and such that N < M.  If these numbers can be determined,
482 store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
483 the results are just a conservative approximation.
485 If EXP is not a pointer, false is returned too. */
488 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
489 unsigned HOST_WIDE_INT
*bitposp
)
493 if (TREE_CODE (exp
) == ADDR_EXPR
)
494 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
495 alignp
, bitposp
, true);
496 else if (TREE_CODE (exp
) == SSA_NAME
497 && POINTER_TYPE_P (TREE_TYPE (exp
)))
499 unsigned int ptr_align
, ptr_misalign
;
500 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
502 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
504 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
505 *alignp
= ptr_align
* BITS_PER_UNIT
;
506 /* We cannot really tell whether this result is an approximation. */
512 *alignp
= BITS_PER_UNIT
;
516 else if (TREE_CODE (exp
) == INTEGER_CST
)
518 *alignp
= BIGGEST_ALIGNMENT
;
519 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
520 & (BIGGEST_ALIGNMENT
- 1));
525 *alignp
= BITS_PER_UNIT
;
529 /* Return the alignment in bits of EXP, a pointer valued expression.
530 The alignment returned is, by default, the alignment of the thing that
531 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
533 Otherwise, look at the expression to see if we can do better, i.e., if the
534 expression is actually pointing at an object whose alignment is tighter. */
537 get_pointer_alignment (tree exp
)
539 unsigned HOST_WIDE_INT bitpos
= 0;
542 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
544 /* align and bitpos now specify known low bits of the pointer.
545 ptr & (align - 1) == bitpos. */
548 align
= (bitpos
& -bitpos
);
553 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
554 way, because it could contain a zero byte in the middle.
555 TREE_STRING_LENGTH is the size of the character array, not the string.
557 ONLY_VALUE should be nonzero if the result is not going to be emitted
558 into the instruction stream and zero if it is going to be expanded.
559 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
560 is returned, otherwise NULL, since
561 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
562 evaluate the side-effects.
564 The value returned is of type `ssizetype'.
566 Unfortunately, string_constant can't access the values of const char
567 arrays with initializers, so neither can we do so here. */
570 c_strlen (tree src
, int only_value
)
573 HOST_WIDE_INT offset
;
579 if (TREE_CODE (src
) == COND_EXPR
580 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
584 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
585 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
586 if (tree_int_cst_equal (len1
, len2
))
590 if (TREE_CODE (src
) == COMPOUND_EXPR
591 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
592 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
594 loc
= EXPR_LOC_OR_LOC (src
, input_location
);
596 src
= string_constant (src
, &offset_node
);
600 max
= TREE_STRING_LENGTH (src
) - 1;
601 ptr
= TREE_STRING_POINTER (src
);
603 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
605 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
606 compute the offset to the following null if we don't know where to
607 start searching for it. */
610 for (i
= 0; i
< max
; i
++)
614 /* We don't know the starting offset, but we do know that the string
615 has no internal zero bytes. We can assume that the offset falls
616 within the bounds of the string; otherwise, the programmer deserves
617 what he gets. Subtract the offset from the length of the string,
618 and return that. This would perhaps not be valid if we were dealing
619 with named arrays in addition to literal string constants. */
621 return size_diffop_loc (loc
, size_int (max
), offset_node
);
624 /* We have a known offset into the string. Start searching there for
625 a null character if we can represent it as a single HOST_WIDE_INT. */
626 if (offset_node
== 0)
628 else if (! tree_fits_shwi_p (offset_node
))
631 offset
= tree_to_shwi (offset_node
);
633 /* If the offset is known to be out of bounds, warn, and call strlen at
635 if (offset
< 0 || offset
> max
)
637 /* Suppress multiple warnings for propagated constant strings. */
638 if (! TREE_NO_WARNING (src
))
640 warning_at (loc
, 0, "offset outside bounds of constant string");
641 TREE_NO_WARNING (src
) = 1;
646 /* Use strlen to search for the first zero byte. Since any strings
647 constructed with build_string will have nulls appended, we win even
648 if we get handed something like (char[4])"abcd".
650 Since OFFSET is our starting index into the string, no further
651 calculation is needed. */
652 return ssize_int (strlen (ptr
+ offset
));
655 /* Return a char pointer for a C string if it is a string constant
656 or sum of string constant and integer constant. */
663 src
= string_constant (src
, &offset_node
);
667 if (offset_node
== 0)
668 return TREE_STRING_POINTER (src
);
669 else if (!tree_fits_uhwi_p (offset_node
)
670 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
673 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);
676 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
677 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
680 c_readstr (const char *str
, enum machine_mode mode
)
686 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
691 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
694 if (WORDS_BIG_ENDIAN
)
695 j
= GET_MODE_SIZE (mode
) - i
- 1;
696 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
697 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
698 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
700 gcc_assert (j
< HOST_BITS_PER_DOUBLE_INT
);
703 ch
= (unsigned char) str
[i
];
704 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
706 return immed_double_const (c
[0], c
[1], mode
);
709 /* Cast a target constant CST to target CHAR and if that value fits into
710 host char type, return zero and put that value into the variable pointed
   to by P; otherwise return nonzero.  */
714 target_char_cast (tree cst
, char *p
)
716 unsigned HOST_WIDE_INT val
, hostval
;
718 if (TREE_CODE (cst
) != INTEGER_CST
719 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
722 val
= TREE_INT_CST_LOW (cst
);
723 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
724 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
727 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
728 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
737 /* Similar to save_expr, but assumes that arbitrary code is not executed
738 in between the multiple evaluations. In particular, we assume that a
739 non-addressable local variable will not be modified. */
742 builtin_save_expr (tree exp
)
744 if (TREE_CODE (exp
) == SSA_NAME
745 || (TREE_ADDRESSABLE (exp
) == 0
746 && (TREE_CODE (exp
) == PARM_DECL
747 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
750 return save_expr (exp
);
753 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
754 times to get the address of either a higher stack frame, or a return
755 address located within it (depending on FNDECL_CODE). */
758 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
762 #ifdef INITIAL_FRAME_ADDRESS_RTX
763 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
767 /* For a zero count with __builtin_return_address, we don't care what
768 frame address we return, because target-specific definitions will
769 override us. Therefore frame pointer elimination is OK, and using
770 the soft frame pointer is OK.
772 For a nonzero count, or a zero count with __builtin_frame_address,
773 we require a stable offset from the current frame pointer to the
774 previous one, so we must use the hard frame pointer, and
775 we must disable frame pointer elimination. */
776 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
777 tem
= frame_pointer_rtx
;
780 tem
= hard_frame_pointer_rtx
;
782 /* Tell reload not to eliminate the frame pointer. */
783 crtl
->accesses_prior_frames
= 1;
787 /* Some machines need special handling before we can access
788 arbitrary frames. For example, on the SPARC, we must first flush
789 all register windows to the stack. */
790 #ifdef SETUP_FRAME_ADDRESSES
792 SETUP_FRAME_ADDRESSES ();
795 /* On the SPARC, the return address is not in the frame, it is in a
796 register. There is no way to access it off of the current frame
797 pointer, but it can be accessed off the previous frame pointer by
798 reading the value from the register window save area. */
799 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
800 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
804 /* Scan back COUNT frames to the specified frame. */
805 for (i
= 0; i
< count
; i
++)
807 /* Assume the dynamic chain pointer is in the word that the
808 frame address points to, unless otherwise specified. */
809 #ifdef DYNAMIC_CHAIN_ADDRESS
810 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
812 tem
= memory_address (Pmode
, tem
);
813 tem
= gen_frame_mem (Pmode
, tem
);
814 tem
= copy_to_reg (tem
);
817 /* For __builtin_frame_address, return what we've got. But, on
818 the SPARC for example, we may have to add a bias. */
819 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
820 #ifdef FRAME_ADDR_RTX
821 return FRAME_ADDR_RTX (tem
);
826 /* For __builtin_return_address, get the return address from that frame. */
827 #ifdef RETURN_ADDR_RTX
828 tem
= RETURN_ADDR_RTX (count
, tem
);
830 tem
= memory_address (Pmode
,
831 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
832 tem
= gen_frame_mem (Pmode
, tem
);
837 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; the setjmp/longjmp expanders below
   test for -1 and lazily create the set with new_alias_set ().  */
838 static alias_set_type setjmp_alias_set
= -1;
840 /* Construct the leading half of a __builtin_setjmp call. Control will
841 return to RECEIVER_LABEL. This is also called directly by the SJLJ
842 exception handling code. */
845 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
847 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
851 if (setjmp_alias_set
== -1)
852 setjmp_alias_set
= new_alias_set ();
854 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
856 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
858 /* We store the frame pointer and the address of receiver_label in
859 the buffer and use the rest of it for the stack save area, which
860 is machine-dependent. */
862 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
863 set_mem_alias_set (mem
, setjmp_alias_set
);
864 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
866 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
867 GET_MODE_SIZE (Pmode
))),
868 set_mem_alias_set (mem
, setjmp_alias_set
);
870 emit_move_insn (validize_mem (mem
),
871 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
873 stack_save
= gen_rtx_MEM (sa_mode
,
874 plus_constant (Pmode
, buf_addr
,
875 2 * GET_MODE_SIZE (Pmode
)));
876 set_mem_alias_set (stack_save
, setjmp_alias_set
);
877 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
879 /* If there is further processing to do, do it. */
880 #ifdef HAVE_builtin_setjmp_setup
881 if (HAVE_builtin_setjmp_setup
)
882 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
885 /* We have a nonlocal label. */
886 cfun
->has_nonlocal_label
= 1;
889 /* Construct the trailing part of a __builtin_setjmp call. This is
890 also called directly by the SJLJ exception handling code.
891 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
894 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
898 /* Mark the FP as used when we get here, so we have to make sure it's
899 marked as used by this function. */
900 emit_use (hard_frame_pointer_rtx
);
902 /* Mark the static chain as clobbered here so life information
903 doesn't get messed up for it. */
904 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
905 if (chain
&& REG_P (chain
))
906 emit_clobber (chain
);
908 /* Now put in the code to restore the frame pointer, and argument
909 pointer, if needed. */
910 #ifdef HAVE_nonlocal_goto
911 if (! HAVE_nonlocal_goto
)
914 /* First adjust our frame pointer to its actual value. It was
915 previously set to the start of the virtual area corresponding to
916 the stacked variables when we branched here and now needs to be
917 adjusted to the actual hardware fp value.
919 Assignments to virtual registers are converted by
920 instantiate_virtual_regs into the corresponding assignment
921 to the underlying register (fp in this case) that makes
922 the original assignment true.
923 So the following insn will actually be decrementing fp by
924 STARTING_FRAME_OFFSET. */
925 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
927 /* Restoring the frame pointer also modifies the hard frame pointer.
928 Mark it used (so that the previous assignment remains live once
929 the frame pointer is eliminated) and clobbered (to represent the
930 implicit update from the assignment). */
931 emit_use (hard_frame_pointer_rtx
);
932 emit_clobber (hard_frame_pointer_rtx
);
935 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
936 if (fixed_regs
[ARG_POINTER_REGNUM
])
938 #ifdef ELIMINABLE_REGS
939 /* If the argument pointer can be eliminated in favor of the
940 frame pointer, we don't need to restore it. We assume here
941 that if such an elimination is present, it can always be used.
942 This is the case on all known machines; if we don't make this
943 assumption, we do unnecessary saving on many machines. */
945 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
947 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
948 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
949 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
952 if (i
== ARRAY_SIZE (elim_regs
))
955 /* Now restore our arg pointer from the address at which it
956 was saved in our stack frame. */
957 emit_move_insn (crtl
->args
.internal_arg_pointer
,
958 copy_to_reg (get_arg_pointer_save_area ()));
963 #ifdef HAVE_builtin_setjmp_receiver
964 if (receiver_label
!= NULL
&& HAVE_builtin_setjmp_receiver
)
965 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
968 #ifdef HAVE_nonlocal_goto_receiver
969 if (HAVE_nonlocal_goto_receiver
)
970 emit_insn (gen_nonlocal_goto_receiver ());
975 /* We must not allow the code we just generated to be reordered by
976 scheduling. Specifically, the update of the frame pointer must
977 happen immediately, not later. */
978 emit_insn (gen_blockage ());
981 /* __builtin_longjmp is passed a pointer to an array of five words (not
982 all will be used on all machines). It operates similarly to the C
983 library function of the same name, but is more efficient. Much of
984 the code below is copied from the handling of non-local gotos. */
987 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
989 rtx fp
, lab
, stack
, insn
, last
;
990 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
992 /* DRAP is needed for stack realign if longjmp is expanded to current
994 if (SUPPORTS_STACK_ALIGNMENT
)
995 crtl
->need_drap
= true;
997 if (setjmp_alias_set
== -1)
998 setjmp_alias_set
= new_alias_set ();
1000 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1002 buf_addr
= force_reg (Pmode
, buf_addr
);
1004 /* We require that the user must pass a second argument of 1, because
1005 that is what builtin_setjmp will return. */
1006 gcc_assert (value
== const1_rtx
);
1008 last
= get_last_insn ();
1009 #ifdef HAVE_builtin_longjmp
1010 if (HAVE_builtin_longjmp
)
1011 emit_insn (gen_builtin_longjmp (buf_addr
));
1015 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1016 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1017 GET_MODE_SIZE (Pmode
)));
1019 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1020 2 * GET_MODE_SIZE (Pmode
)));
1021 set_mem_alias_set (fp
, setjmp_alias_set
);
1022 set_mem_alias_set (lab
, setjmp_alias_set
);
1023 set_mem_alias_set (stack
, setjmp_alias_set
);
1025 /* Pick up FP, label, and SP from the block and jump. This code is
1026 from expand_goto in stmt.c; see there for detailed comments. */
1027 #ifdef HAVE_nonlocal_goto
1028 if (HAVE_nonlocal_goto
)
1029 /* We have to pass a value to the nonlocal_goto pattern that will
1030 get copied into the static_chain pointer, but it does not matter
1031 what that value is, because builtin_setjmp does not use it. */
1032 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
1036 lab
= copy_to_reg (lab
);
1038 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1039 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1041 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1042 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1044 emit_use (hard_frame_pointer_rtx
);
1045 emit_use (stack_pointer_rtx
);
1046 emit_indirect_jump (lab
);
1050 /* Search backwards and mark the jump insn as a non-local goto.
1051 Note that this precludes the use of __builtin_longjmp to a
1052 __builtin_setjmp target in the same function. However, we've
1053 already cautioned the user that these functions are for
1054 internal exception handling use only. */
1055 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1057 gcc_assert (insn
!= last
);
1061 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1064 else if (CALL_P (insn
))
1070 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1072 return (iter
->i
< iter
->n
);
1075 /* This function validates the types of a function call argument list
1076 against a specified list of tree_codes. If the last specifier is a 0,
1077 that represents an ellipses, otherwise the last specifier must be a
1081 validate_arglist (const_tree callexpr
, ...)
1083 enum tree_code code
;
1086 const_call_expr_arg_iterator iter
;
1089 va_start (ap
, callexpr
);
1090 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1094 code
= (enum tree_code
) va_arg (ap
, int);
1098 /* This signifies an ellipses, any further arguments are all ok. */
1102 /* This signifies an endlink, if no arguments remain, return
1103 true, otherwise return false. */
1104 res
= !more_const_call_expr_args_p (&iter
);
1107 /* If no parameters remain or the parameter's code does not
1108 match the specified code, return false. Otherwise continue
1109 checking any remaining arguments. */
1110 arg
= next_const_call_expr_arg (&iter
);
1111 if (!validate_arg (arg
, code
))
1118 /* We need gotos here since we can only have one VA_CLOSE in a
1126 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1127 and the address of the save area. */
1130 expand_builtin_nonlocal_goto (tree exp
)
1132 tree t_label
, t_save_area
;
1133 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
1135 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1138 t_label
= CALL_EXPR_ARG (exp
, 0);
1139 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1141 r_label
= expand_normal (t_label
);
1142 r_label
= convert_memory_address (Pmode
, r_label
);
1143 r_save_area
= expand_normal (t_save_area
);
1144 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1145 /* Copy the address of the save location to a register just in case it was
1146 based on the frame pointer. */
1147 r_save_area
= copy_to_reg (r_save_area
);
1148 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1149 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1150 plus_constant (Pmode
, r_save_area
,
1151 GET_MODE_SIZE (Pmode
)));
1153 crtl
->has_nonlocal_goto
= 1;
1155 #ifdef HAVE_nonlocal_goto
1156 /* ??? We no longer need to pass the static chain value, afaik. */
1157 if (HAVE_nonlocal_goto
)
1158 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1162 r_label
= copy_to_reg (r_label
);
1164 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1165 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1167 /* Restore frame pointer for containing function. */
1168 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1169 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1171 /* USE of hard_frame_pointer_rtx added for consistency;
1172 not clear if really needed. */
1173 emit_use (hard_frame_pointer_rtx
);
1174 emit_use (stack_pointer_rtx
);
1176 /* If the architecture is using a GP register, we must
1177 conservatively assume that the target function makes use of it.
1178 The prologue of functions with nonlocal gotos must therefore
1179 initialize the GP register to the appropriate value, and we
1180 must then make sure that this value is live at the point
1181 of the jump. (Note that this doesn't necessarily apply
1182 to targets with a nonlocal_goto pattern; they are free
1183 to implement it in their own way. Note also that this is
1184 a no-op if the GP register is a global invariant.) */
1185 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1186 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1187 emit_use (pic_offset_table_rtx
);
1189 emit_indirect_jump (r_label
);
1192 /* Search backwards to the jump insn and mark it as a
1194 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1198 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1201 else if (CALL_P (insn
))
1208 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1209 (not all will be used on all machines) that was passed to __builtin_setjmp.
1210 It updates the stack pointer in that block to correspond to the current
1214 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1216 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1218 = gen_rtx_MEM (sa_mode
,
1221 plus_constant (Pmode
, buf_addr
,
1222 2 * GET_MODE_SIZE (Pmode
))));
1224 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1227 /* Expand a call to __builtin_prefetch. For a target that does not support
1228 data prefetch, evaluate the memory address argument in case it has side
1232 expand_builtin_prefetch (tree exp
)
1234 tree arg0
, arg1
, arg2
;
1238 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1241 arg0
= CALL_EXPR_ARG (exp
, 0);
1243 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1244 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1246 nargs
= call_expr_nargs (exp
);
1248 arg1
= CALL_EXPR_ARG (exp
, 1);
1250 arg1
= integer_zero_node
;
1252 arg2
= CALL_EXPR_ARG (exp
, 2);
1254 arg2
= integer_three_node
;
1256 /* Argument 0 is an address. */
1257 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1259 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1260 if (TREE_CODE (arg1
) != INTEGER_CST
)
1262 error ("second argument to %<__builtin_prefetch%> must be a constant");
1263 arg1
= integer_zero_node
;
1265 op1
= expand_normal (arg1
);
1266 /* Argument 1 must be either zero or one. */
1267 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1269 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1274 /* Argument 2 (locality) must be a compile-time constant int. */
1275 if (TREE_CODE (arg2
) != INTEGER_CST
)
1277 error ("third argument to %<__builtin_prefetch%> must be a constant");
1278 arg2
= integer_zero_node
;
1280 op2
= expand_normal (arg2
);
1281 /* Argument 2 must be 0, 1, 2, or 3. */
1282 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1284 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1288 #ifdef HAVE_prefetch
1291 struct expand_operand ops
[3];
1293 create_address_operand (&ops
[0], op0
);
1294 create_integer_operand (&ops
[1], INTVAL (op1
));
1295 create_integer_operand (&ops
[2], INTVAL (op2
));
1296 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1301 /* Don't do anything with direct references to volatile memory, but
1302 generate code to handle other side effects. */
1303 if (!MEM_P (op0
) && side_effects_p (op0
))
1307 /* Get a MEM rtx for expression EXP which is the address of an operand
1308 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1309 the maximum length of the block of memory that might be accessed or
1313 get_memory_rtx (tree exp
, tree len
)
1315 tree orig_exp
= exp
;
1318 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1319 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1320 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1321 exp
= TREE_OPERAND (exp
, 0);
1323 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1324 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1326 /* Get an expression we can use to find the attributes to assign to MEM.
1327 First remove any nops. */
1328 while (CONVERT_EXPR_P (exp
)
1329 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1330 exp
= TREE_OPERAND (exp
, 0);
1332 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1333 (as builtin stringops may alias with anything). */
1334 exp
= fold_build2 (MEM_REF
,
1335 build_array_type (char_type_node
,
1336 build_range_type (sizetype
,
1337 size_one_node
, len
)),
1338 exp
, build_int_cst (ptr_type_node
, 0));
1340 /* If the MEM_REF has no acceptable address, try to get the base object
1341 from the original address we got, and build an all-aliasing
1342 unknown-sized access to that one. */
1343 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1344 set_mem_attributes (mem
, exp
, 0);
1345 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1346 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1349 exp
= build_fold_addr_expr (exp
);
1350 exp
= fold_build2 (MEM_REF
,
1351 build_array_type (char_type_node
,
1352 build_range_type (sizetype
,
1355 exp
, build_int_cst (ptr_type_node
, 0));
1356 set_mem_attributes (mem
, exp
, 0);
1358 set_mem_alias_set (mem
, 0);
1362 /* Built-in functions to perform an untyped call and return. */
1364 #define apply_args_mode \
1365 (this_target_builtins->x_apply_args_mode)
1366 #define apply_result_mode \
1367 (this_target_builtins->x_apply_result_mode)
1369 /* Return the size required for the block returned by __builtin_apply_args,
1370 and initialize apply_args_mode. */
1373 apply_args_size (void)
1375 static int size
= -1;
1378 enum machine_mode mode
;
1380 /* The values computed by this function never change. */
1383 /* The first value is the incoming arg-pointer. */
1384 size
= GET_MODE_SIZE (Pmode
);
1386 /* The second value is the structure value address unless this is
1387 passed as an "invisible" first argument. */
1388 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1389 size
+= GET_MODE_SIZE (Pmode
);
1391 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1392 if (FUNCTION_ARG_REGNO_P (regno
))
1394 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1396 gcc_assert (mode
!= VOIDmode
);
1398 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1399 if (size
% align
!= 0)
1400 size
= CEIL (size
, align
) * align
;
1401 size
+= GET_MODE_SIZE (mode
);
1402 apply_args_mode
[regno
] = mode
;
1406 apply_args_mode
[regno
] = VOIDmode
;
1412 /* Return the size required for the block returned by __builtin_apply,
1413 and initialize apply_result_mode. */
1416 apply_result_size (void)
1418 static int size
= -1;
1420 enum machine_mode mode
;
1422 /* The values computed by this function never change. */
1427 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1428 if (targetm
.calls
.function_value_regno_p (regno
))
1430 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1432 gcc_assert (mode
!= VOIDmode
);
1434 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1435 if (size
% align
!= 0)
1436 size
= CEIL (size
, align
) * align
;
1437 size
+= GET_MODE_SIZE (mode
);
1438 apply_result_mode
[regno
] = mode
;
1441 apply_result_mode
[regno
] = VOIDmode
;
1443 /* Allow targets that use untyped_call and untyped_return to override
1444 the size so that machine-specific information can be stored here. */
1445 #ifdef APPLY_RESULT_SIZE
1446 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned for its mode, mirroring the layout that
	   apply_result_size computed.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1483 /* Save the state required to perform an untyped call with the same
1484 arguments as were passed to the current function. */
1487 expand_builtin_apply_args_1 (void)
1490 int size
, align
, regno
;
1491 enum machine_mode mode
;
1492 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1494 /* Create a block where the arg-pointer, structure value address,
1495 and argument registers can be saved. */
1496 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1498 /* Walk past the arg-pointer and structure value address. */
1499 size
= GET_MODE_SIZE (Pmode
);
1500 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1501 size
+= GET_MODE_SIZE (Pmode
);
1503 /* Save each register used in calling a function to the block. */
1504 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1505 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1507 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1508 if (size
% align
!= 0)
1509 size
= CEIL (size
, align
) * align
;
1511 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1513 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1514 size
+= GET_MODE_SIZE (mode
);
1517 /* Save the arg pointer to the block. */
1518 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1519 #ifdef STACK_GROWS_DOWNWARD
1520 /* We need the pointer as the caller actually passed them to us, not
1521 as we might have pretended they were passed. Make sure it's a valid
1522 operand, as emit_move_insn isn't expected to handle a PLUS. */
1524 = force_operand (plus_constant (Pmode
, tem
, crtl
->args
.pretend_args_size
),
1527 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1529 size
= GET_MODE_SIZE (Pmode
);
1531 /* Save the structure value address unless this is passed as an
1532 "invisible" first argument. */
1533 if (struct_incoming_value
)
1535 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1536 copy_to_reg (struct_incoming_value
));
1537 size
+= GET_MODE_SIZE (Pmode
);
1540 /* Return the address of the block. */
1541 return copy_addr_to_reg (XEXP (registers
, 0));
1544 /* __builtin_apply_args returns block of memory allocated on
1545 the stack into which is stored the arg pointer, structure
1546 value address, static chain, and all the registers that might
1547 possibly be used in performing a function call. The code is
1548 moved to the start of the function so the incoming values are
1552 expand_builtin_apply_args (void)
1554 /* Don't do __builtin_apply_args more than once in a function.
1555 Save the result of the first call and reuse it. */
1556 if (apply_args_value
!= 0)
1557 return apply_args_value
;
1559 /* When this function is called, it means that registers must be
1560 saved on entry to this function. So we migrate the
1561 call to the first insn of this function. */
1566 temp
= expand_builtin_apply_args_1 ();
1570 apply_args_value
= temp
;
1572 /* Put the insns after the NOTE that starts the function.
1573 If this is inside a start_sequence, make the outer-level insn
1574 chain current, so the code is placed at the start of the
1575 function. If internal_arg_pointer is a non-virtual pseudo,
1576 it needs to be placed after the function that initializes
1578 push_topmost_sequence ();
1579 if (REG_P (crtl
->args
.internal_arg_pointer
)
1580 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1581 emit_insn_before (seq
, parm_birth_insn
);
1583 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1584 pop_topmost_sequence ();
1589 /* Perform an untyped call and save the state required to perform an
1590 untyped return of whatever value was returned by the given function. */
1593 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1595 int size
, align
, regno
;
1596 enum machine_mode mode
;
1597 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1598 rtx old_stack_level
= 0;
1599 rtx call_fusage
= 0;
1600 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1602 arguments
= convert_memory_address (Pmode
, arguments
);
1604 /* Create a block where the return registers can be saved. */
1605 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1607 /* Fetch the arg pointer from the ARGUMENTS block. */
1608 incoming_args
= gen_reg_rtx (Pmode
);
1609 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1610 #ifndef STACK_GROWS_DOWNWARD
1611 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1612 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1615 /* Push a new argument block and copy the arguments. Do not allow
1616 the (potential) memcpy call below to interfere with our stack
1618 do_pending_stack_adjust ();
1621 /* Save the stack with nonlocal if available. */
1622 #ifdef HAVE_save_stack_nonlocal
1623 if (HAVE_save_stack_nonlocal
)
1624 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1627 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1629 /* Allocate a block of memory onto the stack and copy the memory
1630 arguments to the outgoing arguments address. We can pass TRUE
1631 as the 4th argument because we just saved the stack pointer
1632 and will restore it right after the call. */
1633 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1635 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1636 may have already set current_function_calls_alloca to true.
1637 current_function_calls_alloca won't be set if argsize is zero,
1638 so we have to guarantee need_drap is true here. */
1639 if (SUPPORTS_STACK_ALIGNMENT
)
1640 crtl
->need_drap
= true;
1642 dest
= virtual_outgoing_args_rtx
;
1643 #ifndef STACK_GROWS_DOWNWARD
1644 if (CONST_INT_P (argsize
))
1645 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1647 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1649 dest
= gen_rtx_MEM (BLKmode
, dest
);
1650 set_mem_align (dest
, PARM_BOUNDARY
);
1651 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1652 set_mem_align (src
, PARM_BOUNDARY
);
1653 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1655 /* Refer to the argument block. */
1657 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1658 set_mem_align (arguments
, PARM_BOUNDARY
);
1660 /* Walk past the arg-pointer and structure value address. */
1661 size
= GET_MODE_SIZE (Pmode
);
1663 size
+= GET_MODE_SIZE (Pmode
);
1665 /* Restore each of the registers previously saved. Make USE insns
1666 for each of these registers for use in making the call. */
1667 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1668 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1670 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1671 if (size
% align
!= 0)
1672 size
= CEIL (size
, align
) * align
;
1673 reg
= gen_rtx_REG (mode
, regno
);
1674 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1675 use_reg (&call_fusage
, reg
);
1676 size
+= GET_MODE_SIZE (mode
);
1679 /* Restore the structure value address unless this is passed as an
1680 "invisible" first argument. */
1681 size
= GET_MODE_SIZE (Pmode
);
1684 rtx value
= gen_reg_rtx (Pmode
);
1685 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1686 emit_move_insn (struct_value
, value
);
1687 if (REG_P (struct_value
))
1688 use_reg (&call_fusage
, struct_value
);
1689 size
+= GET_MODE_SIZE (Pmode
);
1692 /* All arguments and registers used for the call are set up by now! */
1693 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1695 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1696 and we don't want to load it into a register as an optimization,
1697 because prepare_call_address already did it if it should be done. */
1698 if (GET_CODE (function
) != SYMBOL_REF
)
1699 function
= memory_address (FUNCTION_MODE
, function
);
1701 /* Generate the actual call instruction and save the return value. */
1702 #ifdef HAVE_untyped_call
1703 if (HAVE_untyped_call
)
1704 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1705 result
, result_vector (1, result
)));
1708 #ifdef HAVE_call_value
1709 if (HAVE_call_value
)
1713 /* Locate the unique return register. It is not possible to
1714 express a call that sets more than one return register using
1715 call_value; use untyped_call for that. In fact, untyped_call
1716 only needs to save the return registers in the given block. */
1717 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1718 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1720 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1722 valreg
= gen_rtx_REG (mode
, regno
);
1725 emit_call_insn (GEN_CALL_VALUE (valreg
,
1726 gen_rtx_MEM (FUNCTION_MODE
, function
),
1727 const0_rtx
, NULL_RTX
, const0_rtx
));
1729 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1735 /* Find the CALL insn we just emitted, and attach the register usage
1737 call_insn
= last_call_insn ();
1738 add_function_usage_to (call_insn
, call_fusage
);
1740 /* Restore the stack. */
1741 #ifdef HAVE_save_stack_nonlocal
1742 if (HAVE_save_stack_nonlocal
)
1743 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1746 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1747 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1751 /* Return the address of the result block. */
1752 result
= copy_addr_to_reg (XEXP (result
, 0));
1753 return convert_memory_address (ptr_mode
, result
);
1756 /* Perform an untyped return. */
1759 expand_builtin_return (rtx result
)
1761 int size
, align
, regno
;
1762 enum machine_mode mode
;
1764 rtx call_fusage
= 0;
1766 result
= convert_memory_address (Pmode
, result
);
1768 apply_result_size ();
1769 result
= gen_rtx_MEM (BLKmode
, result
);
1771 #ifdef HAVE_untyped_return
1772 if (HAVE_untyped_return
)
1774 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1780 /* Restore the return value and note that each value is used. */
1782 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1783 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1785 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1786 if (size
% align
!= 0)
1787 size
= CEIL (size
, align
) * align
;
1788 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1789 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1791 push_to_sequence (call_fusage
);
1793 call_fusage
= get_insns ();
1795 size
+= GET_MODE_SIZE (mode
);
1798 /* Put the USE insns before the return. */
1799 emit_insn (call_fusage
);
1801 /* Return whatever values was restored by jumping directly to the end
1803 expand_naked_return ();
1806 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1808 static enum type_class
1809 type_to_class (tree type
)
1811 switch (TREE_CODE (type
))
1813 case VOID_TYPE
: return void_type_class
;
1814 case INTEGER_TYPE
: return integer_type_class
;
1815 case ENUMERAL_TYPE
: return enumeral_type_class
;
1816 case BOOLEAN_TYPE
: return boolean_type_class
;
1817 case POINTER_TYPE
: return pointer_type_class
;
1818 case REFERENCE_TYPE
: return reference_type_class
;
1819 case OFFSET_TYPE
: return offset_type_class
;
1820 case REAL_TYPE
: return real_type_class
;
1821 case COMPLEX_TYPE
: return complex_type_class
;
1822 case FUNCTION_TYPE
: return function_type_class
;
1823 case METHOD_TYPE
: return method_type_class
;
1824 case RECORD_TYPE
: return record_type_class
;
1826 case QUAL_UNION_TYPE
: return union_type_class
;
1827 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1828 ? string_type_class
: array_type_class
);
1829 case LANG_TYPE
: return lang_type_class
;
1830 default: return no_type_class
;
1834 /* Expand a call EXP to __builtin_classify_type. */
1837 expand_builtin_classify_type (tree exp
)
1839 if (call_expr_nargs (exp
))
1840 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1841 return GEN_INT (no_type_class
);
1844 /* This helper macro, meant to be used in mathfn_built_in below,
1845 determines which among a set of three builtin math functions is
1846 appropriate for a given type mode. The `F' and `L' cases are
1847 automatically generated from the `double' case. */
1848 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1849 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1850 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1851 fcodel = BUILT_IN_MATHFN##L ; break;
1852 /* Similar to above, but appends _R after any F/L suffix. */
1853 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1854 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1855 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1856 fcodel = BUILT_IN_MATHFN##L_R ; break;
1858 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1859 if available. If IMPLICIT is true use the implicit builtin declaration,
1860 otherwise use the explicit declaration. If we can't do the conversion,
1864 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1866 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1870 CASE_MATHFN (BUILT_IN_ACOS
)
1871 CASE_MATHFN (BUILT_IN_ACOSH
)
1872 CASE_MATHFN (BUILT_IN_ASIN
)
1873 CASE_MATHFN (BUILT_IN_ASINH
)
1874 CASE_MATHFN (BUILT_IN_ATAN
)
1875 CASE_MATHFN (BUILT_IN_ATAN2
)
1876 CASE_MATHFN (BUILT_IN_ATANH
)
1877 CASE_MATHFN (BUILT_IN_CBRT
)
1878 CASE_MATHFN (BUILT_IN_CEIL
)
1879 CASE_MATHFN (BUILT_IN_CEXPI
)
1880 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1881 CASE_MATHFN (BUILT_IN_COS
)
1882 CASE_MATHFN (BUILT_IN_COSH
)
1883 CASE_MATHFN (BUILT_IN_DREM
)
1884 CASE_MATHFN (BUILT_IN_ERF
)
1885 CASE_MATHFN (BUILT_IN_ERFC
)
1886 CASE_MATHFN (BUILT_IN_EXP
)
1887 CASE_MATHFN (BUILT_IN_EXP10
)
1888 CASE_MATHFN (BUILT_IN_EXP2
)
1889 CASE_MATHFN (BUILT_IN_EXPM1
)
1890 CASE_MATHFN (BUILT_IN_FABS
)
1891 CASE_MATHFN (BUILT_IN_FDIM
)
1892 CASE_MATHFN (BUILT_IN_FLOOR
)
1893 CASE_MATHFN (BUILT_IN_FMA
)
1894 CASE_MATHFN (BUILT_IN_FMAX
)
1895 CASE_MATHFN (BUILT_IN_FMIN
)
1896 CASE_MATHFN (BUILT_IN_FMOD
)
1897 CASE_MATHFN (BUILT_IN_FREXP
)
1898 CASE_MATHFN (BUILT_IN_GAMMA
)
1899 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1900 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1901 CASE_MATHFN (BUILT_IN_HYPOT
)
1902 CASE_MATHFN (BUILT_IN_ILOGB
)
1903 CASE_MATHFN (BUILT_IN_ICEIL
)
1904 CASE_MATHFN (BUILT_IN_IFLOOR
)
1905 CASE_MATHFN (BUILT_IN_INF
)
1906 CASE_MATHFN (BUILT_IN_IRINT
)
1907 CASE_MATHFN (BUILT_IN_IROUND
)
1908 CASE_MATHFN (BUILT_IN_ISINF
)
1909 CASE_MATHFN (BUILT_IN_J0
)
1910 CASE_MATHFN (BUILT_IN_J1
)
1911 CASE_MATHFN (BUILT_IN_JN
)
1912 CASE_MATHFN (BUILT_IN_LCEIL
)
1913 CASE_MATHFN (BUILT_IN_LDEXP
)
1914 CASE_MATHFN (BUILT_IN_LFLOOR
)
1915 CASE_MATHFN (BUILT_IN_LGAMMA
)
1916 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1917 CASE_MATHFN (BUILT_IN_LLCEIL
)
1918 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1919 CASE_MATHFN (BUILT_IN_LLRINT
)
1920 CASE_MATHFN (BUILT_IN_LLROUND
)
1921 CASE_MATHFN (BUILT_IN_LOG
)
1922 CASE_MATHFN (BUILT_IN_LOG10
)
1923 CASE_MATHFN (BUILT_IN_LOG1P
)
1924 CASE_MATHFN (BUILT_IN_LOG2
)
1925 CASE_MATHFN (BUILT_IN_LOGB
)
1926 CASE_MATHFN (BUILT_IN_LRINT
)
1927 CASE_MATHFN (BUILT_IN_LROUND
)
1928 CASE_MATHFN (BUILT_IN_MODF
)
1929 CASE_MATHFN (BUILT_IN_NAN
)
1930 CASE_MATHFN (BUILT_IN_NANS
)
1931 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1932 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1933 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1934 CASE_MATHFN (BUILT_IN_POW
)
1935 CASE_MATHFN (BUILT_IN_POWI
)
1936 CASE_MATHFN (BUILT_IN_POW10
)
1937 CASE_MATHFN (BUILT_IN_REMAINDER
)
1938 CASE_MATHFN (BUILT_IN_REMQUO
)
1939 CASE_MATHFN (BUILT_IN_RINT
)
1940 CASE_MATHFN (BUILT_IN_ROUND
)
1941 CASE_MATHFN (BUILT_IN_SCALB
)
1942 CASE_MATHFN (BUILT_IN_SCALBLN
)
1943 CASE_MATHFN (BUILT_IN_SCALBN
)
1944 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1945 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1946 CASE_MATHFN (BUILT_IN_SIN
)
1947 CASE_MATHFN (BUILT_IN_SINCOS
)
1948 CASE_MATHFN (BUILT_IN_SINH
)
1949 CASE_MATHFN (BUILT_IN_SQRT
)
1950 CASE_MATHFN (BUILT_IN_TAN
)
1951 CASE_MATHFN (BUILT_IN_TANH
)
1952 CASE_MATHFN (BUILT_IN_TGAMMA
)
1953 CASE_MATHFN (BUILT_IN_TRUNC
)
1954 CASE_MATHFN (BUILT_IN_Y0
)
1955 CASE_MATHFN (BUILT_IN_Y1
)
1956 CASE_MATHFN (BUILT_IN_YN
)
1962 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1964 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1966 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1971 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1974 return builtin_decl_explicit (fcode2
);
1977 /* Like mathfn_built_in_1(), but always use the implicit array. */
1980 mathfn_built_in (tree type
, enum built_in_function fn
)
1982 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1985 /* If errno must be maintained, expand the RTL to check if the result,
1986 TARGET, of a built-in function call, EXP, is NaN, and if so set
1990 expand_errno_check (tree exp
, rtx target
)
1992 rtx lab
= gen_label_rtx ();
1994 /* Test the result; if it is NaN, set errno=EDOM because
1995 the argument was not in the domain. */
1996 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1997 NULL_RTX
, NULL_RTX
, lab
,
1998 /* The jump is very likely. */
1999 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
2002 /* If this built-in doesn't throw an exception, set errno directly. */
2003 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
2005 #ifdef GEN_ERRNO_RTX
2006 rtx errno_rtx
= GEN_ERRNO_RTX
;
2009 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
2011 emit_move_insn (errno_rtx
,
2012 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
2018 /* Make sure the library call isn't expanded as a tail call. */
2019 CALL_EXPR_TAILCALL (exp
) = 0;
2021 /* We can't set errno=EDOM directly; let the library call do it.
2022 Pop the arguments right away in case the call gets deleted. */
2024 expand_call (exp
, target
, 0);
2029 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2030 Return NULL_RTX if a normal call should be emitted rather than expanding
2031 the function in-line. EXP is the expression that is a call to the builtin
2032 function; if convenient, the result should be placed in TARGET.
2033 SUBTARGET may be used as the target for computing one of EXP's operands. */
2036 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
2038 optab builtin_optab
;
2040 tree fndecl
= get_callee_fndecl (exp
);
2041 enum machine_mode mode
;
2042 bool errno_set
= false;
2043 bool try_widening
= false;
2046 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2049 arg
= CALL_EXPR_ARG (exp
, 0);
2051 switch (DECL_FUNCTION_CODE (fndecl
))
2053 CASE_FLT_FN (BUILT_IN_SQRT
):
2054 errno_set
= ! tree_expr_nonnegative_p (arg
);
2055 try_widening
= true;
2056 builtin_optab
= sqrt_optab
;
2058 CASE_FLT_FN (BUILT_IN_EXP
):
2059 errno_set
= true; builtin_optab
= exp_optab
; break;
2060 CASE_FLT_FN (BUILT_IN_EXP10
):
2061 CASE_FLT_FN (BUILT_IN_POW10
):
2062 errno_set
= true; builtin_optab
= exp10_optab
; break;
2063 CASE_FLT_FN (BUILT_IN_EXP2
):
2064 errno_set
= true; builtin_optab
= exp2_optab
; break;
2065 CASE_FLT_FN (BUILT_IN_EXPM1
):
2066 errno_set
= true; builtin_optab
= expm1_optab
; break;
2067 CASE_FLT_FN (BUILT_IN_LOGB
):
2068 errno_set
= true; builtin_optab
= logb_optab
; break;
2069 CASE_FLT_FN (BUILT_IN_LOG
):
2070 errno_set
= true; builtin_optab
= log_optab
; break;
2071 CASE_FLT_FN (BUILT_IN_LOG10
):
2072 errno_set
= true; builtin_optab
= log10_optab
; break;
2073 CASE_FLT_FN (BUILT_IN_LOG2
):
2074 errno_set
= true; builtin_optab
= log2_optab
; break;
2075 CASE_FLT_FN (BUILT_IN_LOG1P
):
2076 errno_set
= true; builtin_optab
= log1p_optab
; break;
2077 CASE_FLT_FN (BUILT_IN_ASIN
):
2078 builtin_optab
= asin_optab
; break;
2079 CASE_FLT_FN (BUILT_IN_ACOS
):
2080 builtin_optab
= acos_optab
; break;
2081 CASE_FLT_FN (BUILT_IN_TAN
):
2082 builtin_optab
= tan_optab
; break;
2083 CASE_FLT_FN (BUILT_IN_ATAN
):
2084 builtin_optab
= atan_optab
; break;
2085 CASE_FLT_FN (BUILT_IN_FLOOR
):
2086 builtin_optab
= floor_optab
; break;
2087 CASE_FLT_FN (BUILT_IN_CEIL
):
2088 builtin_optab
= ceil_optab
; break;
2089 CASE_FLT_FN (BUILT_IN_TRUNC
):
2090 builtin_optab
= btrunc_optab
; break;
2091 CASE_FLT_FN (BUILT_IN_ROUND
):
2092 builtin_optab
= round_optab
; break;
2093 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2094 builtin_optab
= nearbyint_optab
;
2095 if (flag_trapping_math
)
2097 /* Else fallthrough and expand as rint. */
2098 CASE_FLT_FN (BUILT_IN_RINT
):
2099 builtin_optab
= rint_optab
; break;
2100 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2101 builtin_optab
= significand_optab
; break;
2106 /* Make a suitable register to place result in. */
2107 mode
= TYPE_MODE (TREE_TYPE (exp
));
2109 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2112 /* Before working hard, check whether the instruction is available, but try
2113 to widen the mode for specific operations. */
2114 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2115 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2116 && (!errno_set
|| !optimize_insn_for_size_p ()))
2118 rtx result
= gen_reg_rtx (mode
);
2120 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2121 need to expand the argument again. This way, we will not perform
2122 side-effects more the once. */
2123 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2125 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2129 /* Compute into RESULT.
2130 Set RESULT to wherever the result comes back. */
2131 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2136 expand_errno_check (exp
, result
);
2138 /* Output the entire sequence. */
2139 insns
= get_insns ();
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and call to the library function
2147 with the stabilized argument list. */
2151 return expand_call (exp
, target
, target
== const0_rtx
);
2154 /* Expand a call to the builtin binary math functions (pow and atan2).
2155 Return NULL_RTX if a normal call should be emitted rather than expanding the
2156 function in-line. EXP is the expression that is a call to the builtin
2157 function; if convenient, the result should be placed in TARGET.
2158 SUBTARGET may be used as the target for computing one of EXP's
2162 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2164 optab builtin_optab
;
2165 rtx op0
, op1
, insns
, result
;
2166 int op1_type
= REAL_TYPE
;
2167 tree fndecl
= get_callee_fndecl (exp
);
2169 enum machine_mode mode
;
2170 bool errno_set
= true;
2172 switch (DECL_FUNCTION_CODE (fndecl
))
2174 CASE_FLT_FN (BUILT_IN_SCALBN
):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2176 CASE_FLT_FN (BUILT_IN_LDEXP
):
2177 op1_type
= INTEGER_TYPE
;
2182 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2185 arg0
= CALL_EXPR_ARG (exp
, 0);
2186 arg1
= CALL_EXPR_ARG (exp
, 1);
2188 switch (DECL_FUNCTION_CODE (fndecl
))
2190 CASE_FLT_FN (BUILT_IN_POW
):
2191 builtin_optab
= pow_optab
; break;
2192 CASE_FLT_FN (BUILT_IN_ATAN2
):
2193 builtin_optab
= atan2_optab
; break;
2194 CASE_FLT_FN (BUILT_IN_SCALB
):
2195 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2197 builtin_optab
= scalb_optab
; break;
2198 CASE_FLT_FN (BUILT_IN_SCALBN
):
2199 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2200 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2202 /* Fall through... */
2203 CASE_FLT_FN (BUILT_IN_LDEXP
):
2204 builtin_optab
= ldexp_optab
; break;
2205 CASE_FLT_FN (BUILT_IN_FMOD
):
2206 builtin_optab
= fmod_optab
; break;
2207 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2208 CASE_FLT_FN (BUILT_IN_DREM
):
2209 builtin_optab
= remainder_optab
; break;
2214 /* Make a suitable register to place result in. */
2215 mode
= TYPE_MODE (TREE_TYPE (exp
));
2217 /* Before working hard, check whether the instruction is available. */
2218 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2221 result
= gen_reg_rtx (mode
);
2223 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2226 if (errno_set
&& optimize_insn_for_size_p ())
2229 /* Always stabilize the argument list. */
2230 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2231 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2233 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2234 op1
= expand_normal (arg1
);
2238 /* Compute into RESULT.
2239 Set RESULT to wherever the result comes back. */
2240 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2241 result
, 0, OPTAB_DIRECT
);
2243 /* If we were unable to expand via the builtin, stop the sequence
2244 (without outputting the insns) and call to the library function
2245 with the stabilized argument list. */
2249 return expand_call (exp
, target
, target
== const0_rtx
);
2253 expand_errno_check (exp
, result
);
2255 /* Output the entire sequence. */
2256 insns
= get_insns ();
2263 /* Expand a call to the builtin trinary math functions (fma).
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2271 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2273 optab builtin_optab
;
2274 rtx op0
, op1
, op2
, insns
, result
;
2275 tree fndecl
= get_callee_fndecl (exp
);
2276 tree arg0
, arg1
, arg2
;
2277 enum machine_mode mode
;
2279 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2282 arg0
= CALL_EXPR_ARG (exp
, 0);
2283 arg1
= CALL_EXPR_ARG (exp
, 1);
2284 arg2
= CALL_EXPR_ARG (exp
, 2);
2286 switch (DECL_FUNCTION_CODE (fndecl
))
2288 CASE_FLT_FN (BUILT_IN_FMA
):
2289 builtin_optab
= fma_optab
; break;
2294 /* Make a suitable register to place result in. */
2295 mode
= TYPE_MODE (TREE_TYPE (exp
));
2297 /* Before working hard, check whether the instruction is available. */
2298 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2301 result
= gen_reg_rtx (mode
);
2303 /* Always stabilize the argument list. */
2304 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2305 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2306 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2308 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2309 op1
= expand_normal (arg1
);
2310 op2
= expand_normal (arg2
);
2314 /* Compute into RESULT.
2315 Set RESULT to wherever the result comes back. */
2316 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2319 /* If we were unable to expand via the builtin, stop the sequence
2320 (without outputting the insns) and call to the library function
2321 with the stabilized argument list. */
2325 return expand_call (exp
, target
, target
== const0_rtx
);
2328 /* Output the entire sequence. */
2329 insns
= get_insns ();
2336 /* Expand a call to the builtin sin and cos math functions.
2337 Return NULL_RTX if a normal call should be emitted rather than expanding the
2338 function in-line. EXP is the expression that is a call to the builtin
2339 function; if convenient, the result should be placed in TARGET.
2340 SUBTARGET may be used as the target for computing one of EXP's
2344 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2346 optab builtin_optab
;
2348 tree fndecl
= get_callee_fndecl (exp
);
2349 enum machine_mode mode
;
2352 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2355 arg
= CALL_EXPR_ARG (exp
, 0);
2357 switch (DECL_FUNCTION_CODE (fndecl
))
2359 CASE_FLT_FN (BUILT_IN_SIN
):
2360 CASE_FLT_FN (BUILT_IN_COS
):
2361 builtin_optab
= sincos_optab
; break;
2366 /* Make a suitable register to place result in. */
2367 mode
= TYPE_MODE (TREE_TYPE (exp
));
2369 /* Check if sincos insn is available, otherwise fallback
2370 to sin or cos insn. */
2371 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2372 switch (DECL_FUNCTION_CODE (fndecl
))
2374 CASE_FLT_FN (BUILT_IN_SIN
):
2375 builtin_optab
= sin_optab
; break;
2376 CASE_FLT_FN (BUILT_IN_COS
):
2377 builtin_optab
= cos_optab
; break;
2382 /* Before working hard, check whether the instruction is available. */
2383 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2385 rtx result
= gen_reg_rtx (mode
);
2387 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2388 need to expand the argument again. This way, we will not perform
2389 side-effects more the once. */
2390 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2392 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2396 /* Compute into RESULT.
2397 Set RESULT to wherever the result comes back. */
2398 if (builtin_optab
== sincos_optab
)
2402 switch (DECL_FUNCTION_CODE (fndecl
))
2404 CASE_FLT_FN (BUILT_IN_SIN
):
2405 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2407 CASE_FLT_FN (BUILT_IN_COS
):
2408 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2416 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2420 /* Output the entire sequence. */
2421 insns
= get_insns ();
2427 /* If we were unable to expand via the builtin, stop the sequence
2428 (without outputting the insns) and call to the library function
2429 with the stabilized argument list. */
2433 return expand_call (exp
, target
, target
== const0_rtx
);
2436 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2437 return an RTL instruction code that implements the functionality.
2438 If that isn't possible or available return CODE_FOR_nothing. */
2440 static enum insn_code
2441 interclass_mathfn_icode (tree arg
, tree fndecl
)
2443 bool errno_set
= false;
2444 optab builtin_optab
= unknown_optab
;
2445 enum machine_mode mode
;
2447 switch (DECL_FUNCTION_CODE (fndecl
))
2449 CASE_FLT_FN (BUILT_IN_ILOGB
):
2450 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2451 CASE_FLT_FN (BUILT_IN_ISINF
):
2452 builtin_optab
= isinf_optab
; break;
2453 case BUILT_IN_ISNORMAL
:
2454 case BUILT_IN_ISFINITE
:
2455 CASE_FLT_FN (BUILT_IN_FINITE
):
2456 case BUILT_IN_FINITED32
:
2457 case BUILT_IN_FINITED64
:
2458 case BUILT_IN_FINITED128
:
2459 case BUILT_IN_ISINFD32
:
2460 case BUILT_IN_ISINFD64
:
2461 case BUILT_IN_ISINFD128
:
2462 /* These builtins have no optabs (yet). */
2468 /* There's no easy way to detect the case we need to set EDOM. */
2469 if (flag_errno_math
&& errno_set
)
2470 return CODE_FOR_nothing
;
2472 /* Optab mode depends on the mode of the input argument. */
2473 mode
= TYPE_MODE (TREE_TYPE (arg
));
2476 return optab_handler (builtin_optab
, mode
);
2477 return CODE_FOR_nothing
;
2480 /* Expand a call to one of the builtin math functions that operate on
2481 floating point argument and output an integer result (ilogb, isinf,
2483 Return 0 if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function; if convenient, the result should be placed in TARGET. */
2488 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2490 enum insn_code icode
= CODE_FOR_nothing
;
2492 tree fndecl
= get_callee_fndecl (exp
);
2493 enum machine_mode mode
;
2496 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2499 arg
= CALL_EXPR_ARG (exp
, 0);
2500 icode
= interclass_mathfn_icode (arg
, fndecl
);
2501 mode
= TYPE_MODE (TREE_TYPE (arg
));
2503 if (icode
!= CODE_FOR_nothing
)
2505 struct expand_operand ops
[1];
2506 rtx last
= get_last_insn ();
2507 tree orig_arg
= arg
;
2509 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2510 need to expand the argument again. This way, we will not perform
2511 side-effects more the once. */
2512 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2514 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2516 if (mode
!= GET_MODE (op0
))
2517 op0
= convert_to_mode (mode
, op0
, 0);
2519 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2520 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2521 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2522 return ops
[0].value
;
2524 delete_insns_since (last
);
2525 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2531 /* Expand a call to the builtin sincos math function.
2532 Return NULL_RTX if a normal call should be emitted rather than expanding the
2533 function in-line. EXP is the expression that is a call to the builtin
2537 expand_builtin_sincos (tree exp
)
2539 rtx op0
, op1
, op2
, target1
, target2
;
2540 enum machine_mode mode
;
2541 tree arg
, sinp
, cosp
;
2543 location_t loc
= EXPR_LOCATION (exp
);
2544 tree alias_type
, alias_off
;
2546 if (!validate_arglist (exp
, REAL_TYPE
,
2547 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2550 arg
= CALL_EXPR_ARG (exp
, 0);
2551 sinp
= CALL_EXPR_ARG (exp
, 1);
2552 cosp
= CALL_EXPR_ARG (exp
, 2);
2554 /* Make a suitable register to place result in. */
2555 mode
= TYPE_MODE (TREE_TYPE (arg
));
2557 /* Check if sincos insn is available, otherwise emit the call. */
2558 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2561 target1
= gen_reg_rtx (mode
);
2562 target2
= gen_reg_rtx (mode
);
2564 op0
= expand_normal (arg
);
2565 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2566 alias_off
= build_int_cst (alias_type
, 0);
2567 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2569 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2572 /* Compute into target1 and target2.
2573 Set TARGET to wherever the result comes back. */
2574 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2575 gcc_assert (result
);
2577 /* Move target1 and target2 to the memory locations indicated
2579 emit_move_insn (op1
, target1
);
2580 emit_move_insn (op2
, target2
);
2585 /* Expand a call to the internal cexpi builtin to the sincos math function.
2586 EXP is the expression that is a call to the builtin function; if convenient,
2587 the result should be placed in TARGET. */
2590 expand_builtin_cexpi (tree exp
, rtx target
)
2592 tree fndecl
= get_callee_fndecl (exp
);
2594 enum machine_mode mode
;
2596 location_t loc
= EXPR_LOCATION (exp
);
2598 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2601 arg
= CALL_EXPR_ARG (exp
, 0);
2602 type
= TREE_TYPE (arg
);
2603 mode
= TYPE_MODE (TREE_TYPE (arg
));
2605 /* Try expanding via a sincos optab, fall back to emitting a libcall
2606 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2607 is only generated from sincos, cexp or if we have either of them. */
2608 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2610 op1
= gen_reg_rtx (mode
);
2611 op2
= gen_reg_rtx (mode
);
2613 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2615 /* Compute into op1 and op2. */
2616 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2618 else if (targetm
.libc_has_function (function_sincos
))
2620 tree call
, fn
= NULL_TREE
;
2624 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2625 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2626 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2627 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2628 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2629 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2633 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2634 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2635 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2636 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2637 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2638 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2640 /* Make sure not to fold the sincos call again. */
2641 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2642 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2643 call
, 3, arg
, top1
, top2
));
2647 tree call
, fn
= NULL_TREE
, narg
;
2648 tree ctype
= build_complex_type (type
);
2650 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2651 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2652 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2653 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2654 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2655 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2659 /* If we don't have a decl for cexp create one. This is the
2660 friendliest fallback if the user calls __builtin_cexpi
2661 without full target C99 function support. */
2662 if (fn
== NULL_TREE
)
2665 const char *name
= NULL
;
2667 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2669 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2671 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2674 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2675 fn
= build_fn_decl (name
, fntype
);
2678 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2679 build_real (type
, dconst0
), arg
);
2681 /* Make sure not to fold the cexp call again. */
2682 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2683 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2684 target
, VOIDmode
, EXPAND_NORMAL
);
2687 /* Now build the proper return type. */
2688 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2689 make_tree (TREE_TYPE (arg
), op2
),
2690 make_tree (TREE_TYPE (arg
), op1
)),
2691 target
, VOIDmode
, EXPAND_NORMAL
);
2694 /* Conveniently construct a function call expression. FNDECL names the
2695 function to be called, N is the number of arguments, and the "..."
2696 parameters are the argument expressions. Unlike build_call_exr
2697 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2700 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2703 tree fntype
= TREE_TYPE (fndecl
);
2704 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2707 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2709 SET_EXPR_LOCATION (fn
, loc
);
2713 /* Expand a call to one of the builtin rounding functions gcc defines
2714 as an extension (lfloor and lceil). As these are gcc extensions we
2715 do not need to worry about setting errno to EDOM.
2716 If expanding via optab fails, lower expression to (int)(floor(x)).
2717 EXP is the expression that is a call to the builtin function;
2718 if convenient, the result should be placed in TARGET. */
2721 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2723 convert_optab builtin_optab
;
2724 rtx op0
, insns
, tmp
;
2725 tree fndecl
= get_callee_fndecl (exp
);
2726 enum built_in_function fallback_fn
;
2727 tree fallback_fndecl
;
2728 enum machine_mode mode
;
2731 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2734 arg
= CALL_EXPR_ARG (exp
, 0);
2736 switch (DECL_FUNCTION_CODE (fndecl
))
2738 CASE_FLT_FN (BUILT_IN_ICEIL
):
2739 CASE_FLT_FN (BUILT_IN_LCEIL
):
2740 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2741 builtin_optab
= lceil_optab
;
2742 fallback_fn
= BUILT_IN_CEIL
;
2745 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2746 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2747 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2748 builtin_optab
= lfloor_optab
;
2749 fallback_fn
= BUILT_IN_FLOOR
;
2756 /* Make a suitable register to place result in. */
2757 mode
= TYPE_MODE (TREE_TYPE (exp
));
2759 target
= gen_reg_rtx (mode
);
2761 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2762 need to expand the argument again. This way, we will not perform
2763 side-effects more the once. */
2764 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2766 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2770 /* Compute into TARGET. */
2771 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2773 /* Output the entire sequence. */
2774 insns
= get_insns ();
2780 /* If we were unable to expand via the builtin, stop the sequence
2781 (without outputting the insns). */
2784 /* Fall back to floating point rounding optab. */
2785 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2787 /* For non-C99 targets we may end up without a fallback fndecl here
2788 if the user called __builtin_lfloor directly. In this case emit
2789 a call to the floor/ceil variants nevertheless. This should result
2790 in the best user experience for not full C99 targets. */
2791 if (fallback_fndecl
== NULL_TREE
)
2794 const char *name
= NULL
;
2796 switch (DECL_FUNCTION_CODE (fndecl
))
2798 case BUILT_IN_ICEIL
:
2799 case BUILT_IN_LCEIL
:
2800 case BUILT_IN_LLCEIL
:
2803 case BUILT_IN_ICEILF
:
2804 case BUILT_IN_LCEILF
:
2805 case BUILT_IN_LLCEILF
:
2808 case BUILT_IN_ICEILL
:
2809 case BUILT_IN_LCEILL
:
2810 case BUILT_IN_LLCEILL
:
2813 case BUILT_IN_IFLOOR
:
2814 case BUILT_IN_LFLOOR
:
2815 case BUILT_IN_LLFLOOR
:
2818 case BUILT_IN_IFLOORF
:
2819 case BUILT_IN_LFLOORF
:
2820 case BUILT_IN_LLFLOORF
:
2823 case BUILT_IN_IFLOORL
:
2824 case BUILT_IN_LFLOORL
:
2825 case BUILT_IN_LLFLOORL
:
2832 fntype
= build_function_type_list (TREE_TYPE (arg
),
2833 TREE_TYPE (arg
), NULL_TREE
);
2834 fallback_fndecl
= build_fn_decl (name
, fntype
);
2837 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2839 tmp
= expand_normal (exp
);
2840 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2842 /* Truncate the result of floating point optab to integer
2843 via expand_fix (). */
2844 target
= gen_reg_rtx (mode
);
2845 expand_fix (target
, tmp
, 0);
2850 /* Expand a call to one of the builtin math functions doing integer
2852 Return 0 if a normal call should be emitted rather than expanding the
2853 function in-line. EXP is the expression that is a call to the builtin
2854 function; if convenient, the result should be placed in TARGET. */
2857 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2859 convert_optab builtin_optab
;
2861 tree fndecl
= get_callee_fndecl (exp
);
2863 enum machine_mode mode
;
2864 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2866 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2869 arg
= CALL_EXPR_ARG (exp
, 0);
2871 switch (DECL_FUNCTION_CODE (fndecl
))
2873 CASE_FLT_FN (BUILT_IN_IRINT
):
2874 fallback_fn
= BUILT_IN_LRINT
;
2876 CASE_FLT_FN (BUILT_IN_LRINT
):
2877 CASE_FLT_FN (BUILT_IN_LLRINT
):
2878 builtin_optab
= lrint_optab
;
2881 CASE_FLT_FN (BUILT_IN_IROUND
):
2882 fallback_fn
= BUILT_IN_LROUND
;
2884 CASE_FLT_FN (BUILT_IN_LROUND
):
2885 CASE_FLT_FN (BUILT_IN_LLROUND
):
2886 builtin_optab
= lround_optab
;
2893 /* There's no easy way to detect the case we need to set EDOM. */
2894 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2897 /* Make a suitable register to place result in. */
2898 mode
= TYPE_MODE (TREE_TYPE (exp
));
2900 /* There's no easy way to detect the case we need to set EDOM. */
2901 if (!flag_errno_math
)
2903 rtx result
= gen_reg_rtx (mode
);
2905 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2906 need to expand the argument again. This way, we will not perform
2907 side-effects more the once. */
2908 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2910 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2914 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2916 /* Output the entire sequence. */
2917 insns
= get_insns ();
2923 /* If we were unable to expand via the builtin, stop the sequence
2924 (without outputting the insns) and call to the library function
2925 with the stabilized argument list. */
2929 if (fallback_fn
!= BUILT_IN_NONE
)
2931 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2932 targets, (int) round (x) should never be transformed into
2933 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2934 a call to lround in the hope that the target provides at least some
2935 C99 functions. This should result in the best user experience for
2936 not full C99 targets. */
2937 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2940 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2941 fallback_fndecl
, 1, arg
);
2943 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2944 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2945 return convert_to_mode (mode
, target
, 0);
2948 return expand_call (exp
, target
, target
== const0_rtx
);
2951 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2952 a normal call should be emitted rather than expanding the function
2953 in-line. EXP is the expression that is a call to the builtin
2954 function; if convenient, the result should be placed in TARGET. */
2957 expand_builtin_powi (tree exp
, rtx target
)
2961 enum machine_mode mode
;
2962 enum machine_mode mode2
;
2964 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2967 arg0
= CALL_EXPR_ARG (exp
, 0);
2968 arg1
= CALL_EXPR_ARG (exp
, 1);
2969 mode
= TYPE_MODE (TREE_TYPE (exp
));
2971 /* Emit a libcall to libgcc. */
2973 /* Mode of the 2nd argument must match that of an int. */
2974 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2976 if (target
== NULL_RTX
)
2977 target
= gen_reg_rtx (mode
);
2979 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2980 if (GET_MODE (op0
) != mode
)
2981 op0
= convert_to_mode (mode
, op0
, 0);
2982 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2983 if (GET_MODE (op1
) != mode2
)
2984 op1
= convert_to_mode (mode2
, op1
, 0);
2986 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2987 target
, LCT_CONST
, mode
, 2,
2988 op0
, mode
, op1
, mode2
);
2993 /* Expand expression EXP which is a call to the strlen builtin. Return
2994 NULL_RTX if we failed the caller should emit a normal call, otherwise
2995 try to get the result in TARGET, if convenient. */
2998 expand_builtin_strlen (tree exp
, rtx target
,
2999 enum machine_mode target_mode
)
3001 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3005 struct expand_operand ops
[4];
3008 tree src
= CALL_EXPR_ARG (exp
, 0);
3009 rtx src_reg
, before_strlen
;
3010 enum machine_mode insn_mode
= target_mode
;
3011 enum insn_code icode
= CODE_FOR_nothing
;
3014 /* If the length can be computed at compile-time, return it. */
3015 len
= c_strlen (src
, 0);
3017 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3019 /* If the length can be computed at compile-time and is constant
3020 integer, but there are side-effects in src, evaluate
3021 src for side-effects, then return len.
3022 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3023 can be optimized into: i++; x = 3; */
3024 len
= c_strlen (src
, 1);
3025 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3027 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3028 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3031 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
3033 /* If SRC is not a pointer type, don't do this operation inline. */
3037 /* Bail out if we can't compute strlen in the right mode. */
3038 while (insn_mode
!= VOIDmode
)
3040 icode
= optab_handler (strlen_optab
, insn_mode
);
3041 if (icode
!= CODE_FOR_nothing
)
3044 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3046 if (insn_mode
== VOIDmode
)
3049 /* Make a place to hold the source address. We will not expand
3050 the actual source until we are sure that the expansion will
3051 not fail -- there are trees that cannot be expanded twice. */
3052 src_reg
= gen_reg_rtx (Pmode
);
3054 /* Mark the beginning of the strlen sequence so we can emit the
3055 source operand later. */
3056 before_strlen
= get_last_insn ();
3058 create_output_operand (&ops
[0], target
, insn_mode
);
3059 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3060 create_integer_operand (&ops
[2], 0);
3061 create_integer_operand (&ops
[3], align
);
3062 if (!maybe_expand_insn (icode
, 4, ops
))
3065 /* Now that we are assured of success, expand the source. */
3067 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3070 #ifdef POINTERS_EXTEND_UNSIGNED
3071 if (GET_MODE (pat
) != Pmode
)
3072 pat
= convert_to_mode (Pmode
, pat
,
3073 POINTERS_EXTEND_UNSIGNED
);
3075 emit_move_insn (src_reg
, pat
);
3081 emit_insn_after (pat
, before_strlen
);
3083 emit_insn_before (pat
, get_insns ());
3085 /* Return the value in the proper mode for this function. */
3086 if (GET_MODE (ops
[0].value
) == target_mode
)
3087 target
= ops
[0].value
;
3088 else if (target
!= 0)
3089 convert_move (target
, ops
[0].value
, 0);
3091 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3097 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3098 bytes from constant string DATA + OFFSET and return it as target
3102 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3103 enum machine_mode mode
)
3105 const char *str
= (const char *) data
;
3107 gcc_assert (offset
>= 0
3108 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3109 <= strlen (str
) + 1));
3111 return c_readstr (str
+ offset
, mode
);
3114 /* LEN specify length of the block of memcpy/memset operation.
3115 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3116 In some cases we can make very likely guess on max size, then we
3117 set it into PROBABLE_MAX_SIZE. */
3120 determine_block_size (tree len
, rtx len_rtx
,
3121 unsigned HOST_WIDE_INT
*min_size
,
3122 unsigned HOST_WIDE_INT
*max_size
,
3123 unsigned HOST_WIDE_INT
*probable_max_size
)
3125 if (CONST_INT_P (len_rtx
))
3127 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3132 double_int min
, max
;
3133 enum value_range_type range_type
= VR_UNDEFINED
;
3135 /* Determine bounds from the type. */
3136 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3137 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3140 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3141 *probable_max_size
= *max_size
3142 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3144 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3146 if (TREE_CODE (len
) == SSA_NAME
)
3147 range_type
= get_range_info (len
, &min
, &max
);
3148 if (range_type
== VR_RANGE
)
3150 if (min
.fits_uhwi () && *min_size
< min
.to_uhwi ())
3151 *min_size
= min
.to_uhwi ();
3152 if (max
.fits_uhwi () && *max_size
> max
.to_uhwi ())
3153 *probable_max_size
= *max_size
= max
.to_uhwi ();
3155 else if (range_type
== VR_ANTI_RANGE
)
3157 /* Anti range 0...N lets us to determine minimal size to N+1. */
3160 if ((max
+ double_int_one
).fits_uhwi ())
3161 *min_size
= (max
+ double_int_one
).to_uhwi ();
3169 Produce anti range allowing negative values of N. We still
3170 can use the information and make a guess that N is not negative.
3172 else if (!max
.ule (double_int_one
.lshift (30))
3173 && min
.fits_uhwi ())
3174 *probable_max_size
= min
.to_uhwi () - 1;
3177 gcc_checking_assert (*max_size
<=
3178 (unsigned HOST_WIDE_INT
)
3179 GET_MODE_MASK (GET_MODE (len_rtx
)));
3182 /* Expand a call EXP to the memcpy builtin.
3183 Return NULL_RTX if we failed, the caller should emit a normal call,
3184 otherwise try to get the result in TARGET, if convenient (and in
3185 mode MODE if that's convenient). */
3188 expand_builtin_memcpy (tree exp
, rtx target
)
3190 if (!validate_arglist (exp
,
3191 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3195 tree dest
= CALL_EXPR_ARG (exp
, 0);
3196 tree src
= CALL_EXPR_ARG (exp
, 1);
3197 tree len
= CALL_EXPR_ARG (exp
, 2);
3198 const char *src_str
;
3199 unsigned int src_align
= get_pointer_alignment (src
);
3200 unsigned int dest_align
= get_pointer_alignment (dest
);
3201 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3202 HOST_WIDE_INT expected_size
= -1;
3203 unsigned int expected_align
= 0;
3204 unsigned HOST_WIDE_INT min_size
;
3205 unsigned HOST_WIDE_INT max_size
;
3206 unsigned HOST_WIDE_INT probable_max_size
;
3208 /* If DEST is not a pointer type, call the normal function. */
3209 if (dest_align
== 0)
3212 /* If either SRC is not a pointer type, don't do this
3213 operation in-line. */
3217 if (currently_expanding_gimple_stmt
)
3218 stringop_block_profile (currently_expanding_gimple_stmt
,
3219 &expected_align
, &expected_size
);
3221 if (expected_align
< dest_align
)
3222 expected_align
= dest_align
;
3223 dest_mem
= get_memory_rtx (dest
, len
);
3224 set_mem_align (dest_mem
, dest_align
);
3225 len_rtx
= expand_normal (len
);
3226 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3227 &probable_max_size
);
3228 src_str
= c_getstr (src
);
3230 /* If SRC is a string constant and block move would be done
3231 by pieces, we can avoid loading the string from memory
3232 and only stored the computed constants. */
3234 && CONST_INT_P (len_rtx
)
3235 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3236 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3237 CONST_CAST (char *, src_str
),
3240 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3241 builtin_memcpy_read_str
,
3242 CONST_CAST (char *, src_str
),
3243 dest_align
, false, 0);
3244 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3245 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3249 src_mem
= get_memory_rtx (src
, len
);
3250 set_mem_align (src_mem
, src_align
);
3252 /* Copy word part most expediently. */
3253 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3254 CALL_EXPR_TAILCALL (exp
)
3255 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3256 expected_align
, expected_size
,
3257 min_size
, max_size
, probable_max_size
);
3261 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3262 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3268 /* Expand a call EXP to the mempcpy builtin.
3269 Return NULL_RTX if we failed; the caller should emit a normal call,
3270 otherwise try to get the result in TARGET, if convenient (and in
3271 mode MODE if that's convenient). If ENDP is 0 return the
3272 destination pointer, if ENDP is 1 return the end pointer ala
3273 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3277 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3279 if (!validate_arglist (exp
,
3280 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3284 tree dest
= CALL_EXPR_ARG (exp
, 0);
3285 tree src
= CALL_EXPR_ARG (exp
, 1);
3286 tree len
= CALL_EXPR_ARG (exp
, 2);
3287 return expand_builtin_mempcpy_args (dest
, src
, len
,
3288 target
, mode
, /*endp=*/ 1);
3292 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3293 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3294 so that this can also be called without constructing an actual CALL_EXPR.
3295 The other arguments and return value are the same as for
3296 expand_builtin_mempcpy. */
3299 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3300 rtx target
, enum machine_mode mode
, int endp
)
3302 /* If return value is ignored, transform mempcpy into memcpy. */
3303 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3305 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3306 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3308 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3312 const char *src_str
;
3313 unsigned int src_align
= get_pointer_alignment (src
);
3314 unsigned int dest_align
= get_pointer_alignment (dest
);
3315 rtx dest_mem
, src_mem
, len_rtx
;
3317 /* If either SRC or DEST is not a pointer type, don't do this
3318 operation in-line. */
3319 if (dest_align
== 0 || src_align
== 0)
3322 /* If LEN is not constant, call the normal function. */
3323 if (! tree_fits_uhwi_p (len
))
3326 len_rtx
= expand_normal (len
);
3327 src_str
= c_getstr (src
);
3329 /* If SRC is a string constant and block move would be done
3330 by pieces, we can avoid loading the string from memory
3331 and only stored the computed constants. */
3333 && CONST_INT_P (len_rtx
)
3334 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3335 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3336 CONST_CAST (char *, src_str
),
3339 dest_mem
= get_memory_rtx (dest
, len
);
3340 set_mem_align (dest_mem
, dest_align
);
3341 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3342 builtin_memcpy_read_str
,
3343 CONST_CAST (char *, src_str
),
3344 dest_align
, false, endp
);
3345 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3346 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3350 if (CONST_INT_P (len_rtx
)
3351 && can_move_by_pieces (INTVAL (len_rtx
),
3352 MIN (dest_align
, src_align
)))
3354 dest_mem
= get_memory_rtx (dest
, len
);
3355 set_mem_align (dest_mem
, dest_align
);
3356 src_mem
= get_memory_rtx (src
, len
);
3357 set_mem_align (src_mem
, src_align
);
3358 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3359 MIN (dest_align
, src_align
), endp
);
3360 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3361 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3370 # define HAVE_movstr 0
3371 # define CODE_FOR_movstr CODE_FOR_nothing
3374 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3375 we failed, the caller should emit a normal call, otherwise try to
3376 get the result in TARGET, if convenient. If ENDP is 0 return the
3377 destination pointer, if ENDP is 1 return the end pointer ala
3378 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3382 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3384 struct expand_operand ops
[3];
3391 dest_mem
= get_memory_rtx (dest
, NULL
);
3392 src_mem
= get_memory_rtx (src
, NULL
);
3395 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3396 dest_mem
= replace_equiv_address (dest_mem
, target
);
3399 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3400 create_fixed_operand (&ops
[1], dest_mem
);
3401 create_fixed_operand (&ops
[2], src_mem
);
3402 if (!maybe_expand_insn (CODE_FOR_movstr
, 3, ops
))
3405 if (endp
&& target
!= const0_rtx
)
3407 target
= ops
[0].value
;
3408 /* movstr is supposed to set end to the address of the NUL
3409 terminator. If the caller requested a mempcpy-like return value,
3413 rtx tem
= plus_constant (GET_MODE (target
),
3414 gen_lowpart (GET_MODE (target
), target
), 1);
3415 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3421 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3422 NULL_RTX if we failed the caller should emit a normal call, otherwise
3423 try to get the result in TARGET, if convenient (and in mode MODE if that's
3427 expand_builtin_strcpy (tree exp
, rtx target
)
3429 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3431 tree dest
= CALL_EXPR_ARG (exp
, 0);
3432 tree src
= CALL_EXPR_ARG (exp
, 1);
3433 return expand_builtin_strcpy_args (dest
, src
, target
);
3438 /* Helper function to do the actual work for expand_builtin_strcpy. The
3439 arguments to the builtin_strcpy call DEST and SRC are broken out
3440 so that this can also be called without constructing an actual CALL_EXPR.
3441 The other arguments and return value are the same as for
3442 expand_builtin_strcpy. */
3445 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3447 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3450 /* Expand a call EXP to the stpcpy builtin.
3451 Return NULL_RTX if we failed the caller should emit a normal call,
3452 otherwise try to get the result in TARGET, if convenient (and in
3453 mode MODE if that's convenient). */
3456 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3459 location_t loc
= EXPR_LOCATION (exp
);
3461 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3464 dst
= CALL_EXPR_ARG (exp
, 0);
3465 src
= CALL_EXPR_ARG (exp
, 1);
3467 /* If return value is ignored, transform stpcpy into strcpy. */
3468 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3470 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3471 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3472 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3479 /* Ensure we get an actual string whose length can be evaluated at
3480 compile-time, not an expression containing a string. This is
3481 because the latter will potentially produce pessimized code
3482 when used to produce the return value. */
3483 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3484 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3486 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3487 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3488 target
, mode
, /*endp=*/2);
3493 if (TREE_CODE (len
) == INTEGER_CST
)
3495 rtx len_rtx
= expand_normal (len
);
3497 if (CONST_INT_P (len_rtx
))
3499 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3505 if (mode
!= VOIDmode
)
3506 target
= gen_reg_rtx (mode
);
3508 target
= gen_reg_rtx (GET_MODE (ret
));
3510 if (GET_MODE (target
) != GET_MODE (ret
))
3511 ret
= gen_lowpart (GET_MODE (target
), ret
);
3513 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3514 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3522 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3526 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3527 bytes from constant string DATA + OFFSET and return it as target
3531 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3532 enum machine_mode mode
)
3534 const char *str
= (const char *) data
;
3536 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3539 return c_readstr (str
+ offset
, mode
);
3542 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3543 NULL_RTX if we failed the caller should emit a normal call. */
3546 expand_builtin_strncpy (tree exp
, rtx target
)
3548 location_t loc
= EXPR_LOCATION (exp
);
3550 if (validate_arglist (exp
,
3551 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3553 tree dest
= CALL_EXPR_ARG (exp
, 0);
3554 tree src
= CALL_EXPR_ARG (exp
, 1);
3555 tree len
= CALL_EXPR_ARG (exp
, 2);
3556 tree slen
= c_strlen (src
, 1);
3558 /* We must be passed a constant len and src parameter. */
3559 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3562 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3564 /* We're required to pad with trailing zeros if the requested
3565 len is greater than strlen(s2)+1. In that case try to
3566 use store_by_pieces, if it fails, punt. */
3567 if (tree_int_cst_lt (slen
, len
))
3569 unsigned int dest_align
= get_pointer_alignment (dest
);
3570 const char *p
= c_getstr (src
);
3573 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3574 || !can_store_by_pieces (tree_to_uhwi (len
),
3575 builtin_strncpy_read_str
,
3576 CONST_CAST (char *, p
),
3580 dest_mem
= get_memory_rtx (dest
, len
);
3581 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3582 builtin_strncpy_read_str
,
3583 CONST_CAST (char *, p
), dest_align
, false, 0);
3584 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3585 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3592 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3593 bytes from constant string DATA + OFFSET and return it as target
3597 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3598 enum machine_mode mode
)
3600 const char *c
= (const char *) data
;
3601 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3603 memset (p
, *c
, GET_MODE_SIZE (mode
));
3605 return c_readstr (p
, mode
);
3608 /* Callback routine for store_by_pieces. Return the RTL of a register
3609 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3610 char value given in the RTL register data. For example, if mode is
3611 4 bytes wide, return the RTL for 0x01010101*data. */
3614 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3615 enum machine_mode mode
)
3621 size
= GET_MODE_SIZE (mode
);
3625 p
= XALLOCAVEC (char, size
);
3626 memset (p
, 1, size
);
3627 coeff
= c_readstr (p
, mode
);
3629 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3630 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3631 return force_reg (mode
, target
);
3634 /* Expand expression EXP, which is a call to the memset builtin. Return
3635 NULL_RTX if we failed the caller should emit a normal call, otherwise
3636 try to get the result in TARGET, if convenient (and in mode MODE if that's
3640 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3642 if (!validate_arglist (exp
,
3643 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3647 tree dest
= CALL_EXPR_ARG (exp
, 0);
3648 tree val
= CALL_EXPR_ARG (exp
, 1);
3649 tree len
= CALL_EXPR_ARG (exp
, 2);
3650 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3654 /* Helper function to do the actual work for expand_builtin_memset. The
3655 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3656 so that this can also be called without constructing an actual CALL_EXPR.
3657 The other arguments and return value are the same as for
3658 expand_builtin_memset. */
3661 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3662 rtx target
, enum machine_mode mode
, tree orig_exp
)
3665 enum built_in_function fcode
;
3666 enum machine_mode val_mode
;
3668 unsigned int dest_align
;
3669 rtx dest_mem
, dest_addr
, len_rtx
;
3670 HOST_WIDE_INT expected_size
= -1;
3671 unsigned int expected_align
= 0;
3672 unsigned HOST_WIDE_INT min_size
;
3673 unsigned HOST_WIDE_INT max_size
;
3674 unsigned HOST_WIDE_INT probable_max_size
;
3676 dest_align
= get_pointer_alignment (dest
);
3678 /* If DEST is not a pointer type, don't do this operation in-line. */
3679 if (dest_align
== 0)
3682 if (currently_expanding_gimple_stmt
)
3683 stringop_block_profile (currently_expanding_gimple_stmt
,
3684 &expected_align
, &expected_size
);
3686 if (expected_align
< dest_align
)
3687 expected_align
= dest_align
;
3689 /* If the LEN parameter is zero, return DEST. */
3690 if (integer_zerop (len
))
3692 /* Evaluate and ignore VAL in case it has side-effects. */
3693 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3694 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3697 /* Stabilize the arguments in case we fail. */
3698 dest
= builtin_save_expr (dest
);
3699 val
= builtin_save_expr (val
);
3700 len
= builtin_save_expr (len
);
3702 len_rtx
= expand_normal (len
);
3703 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3704 &probable_max_size
);
3705 dest_mem
= get_memory_rtx (dest
, len
);
3706 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3708 if (TREE_CODE (val
) != INTEGER_CST
)
3712 val_rtx
= expand_normal (val
);
3713 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3715 /* Assume that we can memset by pieces if we can store
3716 * the coefficients by pieces (in the required modes).
3717 * We can't pass builtin_memset_gen_str as that emits RTL. */
3719 if (tree_fits_uhwi_p (len
)
3720 && can_store_by_pieces (tree_to_uhwi (len
),
3721 builtin_memset_read_str
, &c
, dest_align
,
3724 val_rtx
= force_reg (val_mode
, val_rtx
);
3725 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3726 builtin_memset_gen_str
, val_rtx
, dest_align
,
3729 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3730 dest_align
, expected_align
,
3731 expected_size
, min_size
, max_size
,
3735 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3736 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3740 if (target_char_cast (val
, &c
))
3745 if (tree_fits_uhwi_p (len
)
3746 && can_store_by_pieces (tree_to_uhwi (len
),
3747 builtin_memset_read_str
, &c
, dest_align
,
3749 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3750 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3751 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3752 gen_int_mode (c
, val_mode
),
3753 dest_align
, expected_align
,
3754 expected_size
, min_size
, max_size
,
3758 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3759 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3763 set_mem_align (dest_mem
, dest_align
);
3764 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3765 CALL_EXPR_TAILCALL (orig_exp
)
3766 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3767 expected_align
, expected_size
,
3773 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3774 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3780 fndecl
= get_callee_fndecl (orig_exp
);
3781 fcode
= DECL_FUNCTION_CODE (fndecl
);
3782 if (fcode
== BUILT_IN_MEMSET
)
3783 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3785 else if (fcode
== BUILT_IN_BZERO
)
3786 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3790 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3791 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3792 return expand_call (fn
, target
, target
== const0_rtx
);
3795 /* Expand expression EXP, which is a call to the bzero builtin. Return
3796 NULL_RTX if we failed the caller should emit a normal call. */
3799 expand_builtin_bzero (tree exp
)
3802 location_t loc
= EXPR_LOCATION (exp
);
3804 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3807 dest
= CALL_EXPR_ARG (exp
, 0);
3808 size
= CALL_EXPR_ARG (exp
, 1);
3810 /* New argument list transforming bzero(ptr x, int y) to
3811 memset(ptr x, int 0, size_t y). This is done this way
3812 so that if it isn't expanded inline, we fallback to
3813 calling bzero instead of memset. */
3815 return expand_builtin_memset_args (dest
, integer_zero_node
,
3816 fold_convert_loc (loc
,
3817 size_type_node
, size
),
3818 const0_rtx
, VOIDmode
, exp
);
3821 /* Expand expression EXP, which is a call to the memcmp built-in function.
3822 Return NULL_RTX if we failed and the caller should emit a normal call,
3823 otherwise try to get the result in TARGET, if convenient (and in mode
3824 MODE, if that's convenient). */
3827 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3828 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3830 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3832 if (!validate_arglist (exp
,
3833 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3836 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3837 implementing memcmp because it will stop if it encounters two
3839 #if defined HAVE_cmpmemsi
3841 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3844 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3845 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3846 tree len
= CALL_EXPR_ARG (exp
, 2);
3848 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3849 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3850 enum machine_mode insn_mode
;
3853 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3857 /* If we don't have POINTER_TYPE, call the function. */
3858 if (arg1_align
== 0 || arg2_align
== 0)
3861 /* Make a place to write the result of the instruction. */
3864 && REG_P (result
) && GET_MODE (result
) == insn_mode
3865 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3866 result
= gen_reg_rtx (insn_mode
);
3868 arg1_rtx
= get_memory_rtx (arg1
, len
);
3869 arg2_rtx
= get_memory_rtx (arg2
, len
);
3870 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3872 /* Set MEM_SIZE as appropriate. */
3873 if (CONST_INT_P (arg3_rtx
))
3875 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3876 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3880 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3881 GEN_INT (MIN (arg1_align
, arg2_align
)));
3888 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3889 TYPE_MODE (integer_type_node
), 3,
3890 XEXP (arg1_rtx
, 0), Pmode
,
3891 XEXP (arg2_rtx
, 0), Pmode
,
3892 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3893 TYPE_UNSIGNED (sizetype
)),
3894 TYPE_MODE (sizetype
));
3896 /* Return the value in the proper mode for this function. */
3897 mode
= TYPE_MODE (TREE_TYPE (exp
));
3898 if (GET_MODE (result
) == mode
)
3900 else if (target
!= 0)
3902 convert_move (target
, result
, 0);
3906 return convert_to_mode (mode
, result
, 0);
3908 #endif /* HAVE_cmpmemsi. */
3913 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3914 if we failed the caller should emit a normal call, otherwise try to get
3915 the result in TARGET, if convenient. */
3918 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3920 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3923 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3924 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3925 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3927 rtx arg1_rtx
, arg2_rtx
;
3928 rtx result
, insn
= NULL_RTX
;
3930 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3931 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3933 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3934 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3936 /* If we don't have POINTER_TYPE, call the function. */
3937 if (arg1_align
== 0 || arg2_align
== 0)
3940 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3941 arg1
= builtin_save_expr (arg1
);
3942 arg2
= builtin_save_expr (arg2
);
3944 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3945 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3947 #ifdef HAVE_cmpstrsi
3948 /* Try to call cmpstrsi. */
3951 enum machine_mode insn_mode
3952 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3954 /* Make a place to write the result of the instruction. */
3957 && REG_P (result
) && GET_MODE (result
) == insn_mode
3958 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3959 result
= gen_reg_rtx (insn_mode
);
3961 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3962 GEN_INT (MIN (arg1_align
, arg2_align
)));
3965 #ifdef HAVE_cmpstrnsi
3966 /* Try to determine at least one length and call cmpstrnsi. */
3967 if (!insn
&& HAVE_cmpstrnsi
)
3972 enum machine_mode insn_mode
3973 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3974 tree len1
= c_strlen (arg1
, 1);
3975 tree len2
= c_strlen (arg2
, 1);
3978 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3980 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3982 /* If we don't have a constant length for the first, use the length
3983 of the second, if we know it. We don't require a constant for
3984 this case; some cost analysis could be done if both are available
3985 but neither is constant. For now, assume they're equally cheap,
3986 unless one has side effects. If both strings have constant lengths,
3993 else if (TREE_SIDE_EFFECTS (len1
))
3995 else if (TREE_SIDE_EFFECTS (len2
))
3997 else if (TREE_CODE (len1
) != INTEGER_CST
)
3999 else if (TREE_CODE (len2
) != INTEGER_CST
)
4001 else if (tree_int_cst_lt (len1
, len2
))
4006 /* If both arguments have side effects, we cannot optimize. */
4007 if (!len
|| TREE_SIDE_EFFECTS (len
))
4010 arg3_rtx
= expand_normal (len
);
4012 /* Make a place to write the result of the instruction. */
4015 && REG_P (result
) && GET_MODE (result
) == insn_mode
4016 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4017 result
= gen_reg_rtx (insn_mode
);
4019 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4020 GEN_INT (MIN (arg1_align
, arg2_align
)));
4026 enum machine_mode mode
;
4029 /* Return the value in the proper mode for this function. */
4030 mode
= TYPE_MODE (TREE_TYPE (exp
));
4031 if (GET_MODE (result
) == mode
)
4034 return convert_to_mode (mode
, result
, 0);
4035 convert_move (target
, result
, 0);
4039 /* Expand the library call ourselves using a stabilized argument
4040 list to avoid re-evaluating the function's arguments twice. */
4041 #ifdef HAVE_cmpstrnsi
4044 fndecl
= get_callee_fndecl (exp
);
4045 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4046 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4047 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4048 return expand_call (fn
, target
, target
== const0_rtx
);
4054 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4055 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4056 the result in TARGET, if convenient. */
4059 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4060 ATTRIBUTE_UNUSED
enum machine_mode mode
)
4062 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4064 if (!validate_arglist (exp
,
4065 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4068 /* If c_strlen can determine an expression for one of the string
4069 lengths, and it doesn't have side effects, then emit cmpstrnsi
4070 using length MIN(strlen(string)+1, arg3). */
4071 #ifdef HAVE_cmpstrnsi
4074 tree len
, len1
, len2
;
4075 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4078 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4079 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4080 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4082 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4083 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4084 enum machine_mode insn_mode
4085 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4087 len1
= c_strlen (arg1
, 1);
4088 len2
= c_strlen (arg2
, 1);
4091 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4093 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4095 /* If we don't have a constant length for the first, use the length
4096 of the second, if we know it. We don't require a constant for
4097 this case; some cost analysis could be done if both are available
4098 but neither is constant. For now, assume they're equally cheap,
4099 unless one has side effects. If both strings have constant lengths,
4106 else if (TREE_SIDE_EFFECTS (len1
))
4108 else if (TREE_SIDE_EFFECTS (len2
))
4110 else if (TREE_CODE (len1
) != INTEGER_CST
)
4112 else if (TREE_CODE (len2
) != INTEGER_CST
)
4114 else if (tree_int_cst_lt (len1
, len2
))
4119 /* If both arguments have side effects, we cannot optimize. */
4120 if (!len
|| TREE_SIDE_EFFECTS (len
))
4123 /* The actual new length parameter is MIN(len,arg3). */
4124 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4125 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4127 /* If we don't have POINTER_TYPE, call the function. */
4128 if (arg1_align
== 0 || arg2_align
== 0)
4131 /* Make a place to write the result of the instruction. */
4134 && REG_P (result
) && GET_MODE (result
) == insn_mode
4135 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4136 result
= gen_reg_rtx (insn_mode
);
4138 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4139 arg1
= builtin_save_expr (arg1
);
4140 arg2
= builtin_save_expr (arg2
);
4141 len
= builtin_save_expr (len
);
4143 arg1_rtx
= get_memory_rtx (arg1
, len
);
4144 arg2_rtx
= get_memory_rtx (arg2
, len
);
4145 arg3_rtx
= expand_normal (len
);
4146 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4147 GEN_INT (MIN (arg1_align
, arg2_align
)));
4152 /* Return the value in the proper mode for this function. */
4153 mode
= TYPE_MODE (TREE_TYPE (exp
));
4154 if (GET_MODE (result
) == mode
)
4157 return convert_to_mode (mode
, result
, 0);
4158 convert_move (target
, result
, 0);
4162 /* Expand the library call ourselves using a stabilized argument
4163 list to avoid re-evaluating the function's arguments twice. */
4164 fndecl
= get_callee_fndecl (exp
);
4165 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4167 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4168 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4169 return expand_call (fn
, target
, target
== const0_rtx
);
4175 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4176 if that's convenient. */
4179 expand_builtin_saveregs (void)
4183 /* Don't do __builtin_saveregs more than once in a function.
4184 Save the result of the first call and reuse it. */
4185 if (saveregs_value
!= 0)
4186 return saveregs_value
;
4188 /* When this function is called, it means that registers must be
4189 saved on entry to this function. So we migrate the call to the
4190 first insn of this function. */
4194 /* Do whatever the machine needs done in this case. */
4195 val
= targetm
.calls
.expand_builtin_saveregs ();
4200 saveregs_value
= val
;
4202 /* Put the insns after the NOTE that starts the function. If this
4203 is inside a start_sequence, make the outer-level insn chain current, so
4204 the code is placed at the start of the function. */
4205 push_topmost_sequence ();
4206 emit_insn_after (seq
, entry_of_function ());
4207 pop_topmost_sequence ();
4212 /* Expand a call to __builtin_next_arg. */
4215 expand_builtin_next_arg (void)
4217 /* Checking arguments is already done in fold_builtin_next_arg
4218 that must be called before this function. */
4219 return expand_binop (ptr_mode
, add_optab
,
4220 crtl
->args
.internal_arg_pointer
,
4221 crtl
->args
.arg_offset_rtx
,
4222 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4225 /* Make it easier for the backends by protecting the valist argument
4226 from multiple evaluations. */
4229 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4231 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4233 /* The current way of determining the type of valist is completely
4234 bogus. We should have the information on the va builtin instead. */
4236 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4238 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4240 if (TREE_SIDE_EFFECTS (valist
))
4241 valist
= save_expr (valist
);
4243 /* For this case, the backends will be expecting a pointer to
4244 vatype, but it's possible we've actually been given an array
4245 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4247 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4249 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4250 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4255 tree pt
= build_pointer_type (vatype
);
4259 if (! TREE_SIDE_EFFECTS (valist
))
4262 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4263 TREE_SIDE_EFFECTS (valist
) = 1;
4266 if (TREE_SIDE_EFFECTS (valist
))
4267 valist
= save_expr (valist
);
4268 valist
= fold_build2_loc (loc
, MEM_REF
,
4269 vatype
, valist
, build_int_cst (pt
, 0));
4275 /* The "standard" definition of va_list is void*. */
4278 std_build_builtin_va_list (void)
4280 return ptr_type_node
;
4283 /* The "standard" abi va_list is va_list_type_node. */
4286 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4288 return va_list_type_node
;
4291 /* The "standard" type of va_list is va_list_type_node. */
4294 std_canonical_va_list_type (tree type
)
4298 if (INDIRECT_REF_P (type
))
4299 type
= TREE_TYPE (type
);
4300 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4301 type
= TREE_TYPE (type
);
4302 wtype
= va_list_type_node
;
4304 /* Treat structure va_list types. */
4305 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4306 htype
= TREE_TYPE (htype
);
4307 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4309 /* If va_list is an array type, the argument may have decayed
4310 to a pointer type, e.g. by being passed to another function.
4311 In that case, unwrap both types so that we can compare the
4312 underlying records. */
4313 if (TREE_CODE (htype
) == ARRAY_TYPE
4314 || POINTER_TYPE_P (htype
))
4316 wtype
= TREE_TYPE (wtype
);
4317 htype
= TREE_TYPE (htype
);
4320 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4321 return va_list_type_node
;
4326 /* The "standard" implementation of va_start: just assign `nextarg' to
4330 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4332 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4333 convert_move (va_r
, nextarg
, 0);
4336 /* Expand EXP, a call to __builtin_va_start. */
4339 expand_builtin_va_start (tree exp
)
4343 location_t loc
= EXPR_LOCATION (exp
);
4345 if (call_expr_nargs (exp
) < 2)
4347 error_at (loc
, "too few arguments to function %<va_start%>");
4351 if (fold_builtin_next_arg (exp
, true))
4354 nextarg
= expand_builtin_next_arg ();
4355 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4357 if (targetm
.expand_builtin_va_start
)
4358 targetm
.expand_builtin_va_start (valist
, nextarg
);
4360 std_expand_builtin_va_start (valist
, nextarg
);
4365 /* Expand EXP, a call to __builtin_va_end. */
4368 expand_builtin_va_end (tree exp
)
4370 tree valist
= CALL_EXPR_ARG (exp
, 0);
4372 /* Evaluate for side effects, if needed. I hate macros that don't
4374 if (TREE_SIDE_EFFECTS (valist
))
4375 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4380 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4381 builtin rather than just as an assignment in stdarg.h because of the
4382 nastiness of array-type va_list types. */
4385 expand_builtin_va_copy (tree exp
)
4388 location_t loc
= EXPR_LOCATION (exp
);
4390 dst
= CALL_EXPR_ARG (exp
, 0);
4391 src
= CALL_EXPR_ARG (exp
, 1);
4393 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4394 src
= stabilize_va_list_loc (loc
, src
, 0);
4396 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4398 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4400 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4401 TREE_SIDE_EFFECTS (t
) = 1;
4402 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4406 rtx dstb
, srcb
, size
;
4408 /* Evaluate to pointers. */
4409 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4410 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4411 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4412 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4414 dstb
= convert_memory_address (Pmode
, dstb
);
4415 srcb
= convert_memory_address (Pmode
, srcb
);
4417 /* "Dereference" to BLKmode memories. */
4418 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4419 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4420 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4421 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4422 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4423 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4426 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4432 /* Expand a call to one of the builtin functions __builtin_frame_address or
4433 __builtin_return_address. */
4436 expand_builtin_frame_address (tree fndecl
, tree exp
)
4438 /* The argument must be a nonnegative integer constant.
4439 It counts the number of frames to scan up the stack.
4440 The value is the return address saved in that frame. */
4441 if (call_expr_nargs (exp
) == 0)
4442 /* Warning about missing arg was already issued. */
4444 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4446 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4447 error ("invalid argument to %<__builtin_frame_address%>");
4449 error ("invalid argument to %<__builtin_return_address%>");
4455 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4456 tree_to_uhwi (CALL_EXPR_ARG (exp
, 0)));
4458 /* Some ports cannot access arbitrary stack frames. */
4461 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4462 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4464 warning (0, "unsupported argument to %<__builtin_return_address%>");
4468 /* For __builtin_frame_address, return what we've got. */
4469 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4473 && ! CONSTANT_P (tem
))
4474 tem
= copy_addr_to_reg (tem
);
4479 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4480 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4481 is the same as for allocate_dynamic_stack_space. */
4484 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4490 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4491 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4494 = (alloca_with_align
4495 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4496 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4501 /* Compute the argument. */
4502 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4504 /* Compute the alignment. */
4505 align
= (alloca_with_align
4506 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4507 : BIGGEST_ALIGNMENT
);
4509 /* Allocate the desired space. */
4510 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4511 result
= convert_memory_address (ptr_mode
, result
);
4516 /* Expand a call to bswap builtin in EXP.
4517 Return NULL_RTX if a normal call should be emitted rather than expanding the
4518 function in-line. If convenient, the result should be placed in TARGET.
4519 SUBTARGET may be used as the target for computing one of EXP's operands. */
4522 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4528 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4531 arg
= CALL_EXPR_ARG (exp
, 0);
4532 op0
= expand_expr (arg
,
4533 subtarget
&& GET_MODE (subtarget
) == target_mode
4534 ? subtarget
: NULL_RTX
,
4535 target_mode
, EXPAND_NORMAL
);
4536 if (GET_MODE (op0
) != target_mode
)
4537 op0
= convert_to_mode (target_mode
, op0
, 1);
4539 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4541 gcc_assert (target
);
4543 return convert_to_mode (target_mode
, target
, 1);
4546 /* Expand a call to a unary builtin in EXP.
4547 Return NULL_RTX if a normal call should be emitted rather than expanding the
4548 function in-line. If convenient, the result should be placed in TARGET.
4549 SUBTARGET may be used as the target for computing one of EXP's operands. */
4552 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4553 rtx subtarget
, optab op_optab
)
4557 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4560 /* Compute the argument. */
4561 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4563 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4564 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4565 VOIDmode
, EXPAND_NORMAL
);
4566 /* Compute op, into TARGET if possible.
4567 Set TARGET to wherever the result comes back. */
4568 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4569 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4570 gcc_assert (target
);
4572 return convert_to_mode (target_mode
, target
, 0);
4575 /* Expand a call to __builtin_expect. We just return our argument
4576 as the builtin_expect semantic should've been already executed by
4577 tree branch prediction pass. */
4580 expand_builtin_expect (tree exp
, rtx target
)
4584 if (call_expr_nargs (exp
) < 2)
4586 arg
= CALL_EXPR_ARG (exp
, 0);
4588 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4589 /* When guessing was done, the hints should be already stripped away. */
4590 gcc_assert (!flag_guess_branch_prob
4591 || optimize
== 0 || seen_error ());
4595 /* Expand a call to __builtin_assume_aligned. We just return our first
4596 argument as the builtin_assume_aligned semantic should've been already
4600 expand_builtin_assume_aligned (tree exp
, rtx target
)
4602 if (call_expr_nargs (exp
) < 2)
4604 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4606 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4607 && (call_expr_nargs (exp
) < 3
4608 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4613 expand_builtin_trap (void)
4618 rtx insn
= emit_insn (gen_trap ());
4619 /* For trap insns when not accumulating outgoing args force
4620 REG_ARGS_SIZE note to prevent crossjumping of calls with
4621 different args sizes. */
4622 if (!ACCUMULATE_OUTGOING_ARGS
)
4623 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4627 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4642 /* Expand EXP, a call to fabs, fabsf or fabsl.
4643 Return NULL_RTX if a normal call should be emitted rather than expanding
4644 the function inline. If convenient, the result should be placed
4645 in TARGET. SUBTARGET may be used as the target for computing
4649 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4651 enum machine_mode mode
;
4655 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4658 arg
= CALL_EXPR_ARG (exp
, 0);
4659 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4660 mode
= TYPE_MODE (TREE_TYPE (arg
));
4661 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4662 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4665 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4666 Return NULL is a normal call should be emitted rather than expanding the
4667 function inline. If convenient, the result should be placed in TARGET.
4668 SUBTARGET may be used as the target for computing the operand. */
4671 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4676 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4679 arg
= CALL_EXPR_ARG (exp
, 0);
4680 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4682 arg
= CALL_EXPR_ARG (exp
, 1);
4683 op1
= expand_normal (arg
);
4685 return expand_copysign (op0
, op1
, target
);
4688 /* Create a new constant string literal and return a char* pointer to it.
4689 The STRING_CST value is the LEN characters at STR. */
4691 build_string_literal (int len
, const char *str
)
4693 tree t
, elem
, index
, type
;
4695 t
= build_string (len
, str
);
4696 elem
= build_type_variant (char_type_node
, 1, 0);
4697 index
= build_index_type (size_int (len
- 1));
4698 type
= build_array_type (elem
, index
);
4699 TREE_TYPE (t
) = type
;
4700 TREE_CONSTANT (t
) = 1;
4701 TREE_READONLY (t
) = 1;
4702 TREE_STATIC (t
) = 1;
4704 type
= build_pointer_type (elem
);
4705 t
= build1 (ADDR_EXPR
, type
,
4706 build4 (ARRAY_REF
, elem
,
4707 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4711 /* Expand a call to __builtin___clear_cache. */
4714 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4716 #ifndef HAVE_clear_cache
4717 #ifdef CLEAR_INSN_CACHE
4718 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4719 does something. Just do the default expansion to a call to
4723 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4724 does nothing. There is no need to call it. Do nothing. */
4726 #endif /* CLEAR_INSN_CACHE */
4728 /* We have a "clear_cache" insn, and it will handle everything. */
4730 rtx begin_rtx
, end_rtx
;
4732 /* We must not expand to a library call. If we did, any
4733 fallback library function in libgcc that might contain a call to
4734 __builtin___clear_cache() would recurse infinitely. */
4735 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4737 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4741 if (HAVE_clear_cache
)
4743 struct expand_operand ops
[2];
4745 begin
= CALL_EXPR_ARG (exp
, 0);
4746 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4748 end
= CALL_EXPR_ARG (exp
, 1);
4749 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4751 create_address_operand (&ops
[0], begin_rtx
);
4752 create_address_operand (&ops
[1], end_rtx
);
4753 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4757 #endif /* HAVE_clear_cache */
4760 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4763 round_trampoline_addr (rtx tramp
)
4765 rtx temp
, addend
, mask
;
4767 /* If we don't need too much alignment, we'll have been guaranteed
4768 proper alignment by get_trampoline_type. */
4769 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4772 /* Round address up to desired boundary. */
4773 temp
= gen_reg_rtx (Pmode
);
4774 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4775 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4777 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4778 temp
, 0, OPTAB_LIB_WIDEN
);
4779 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4780 temp
, 0, OPTAB_LIB_WIDEN
);
4786 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4788 tree t_tramp
, t_func
, t_chain
;
4789 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4791 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4792 POINTER_TYPE
, VOID_TYPE
))
4795 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4796 t_func
= CALL_EXPR_ARG (exp
, 1);
4797 t_chain
= CALL_EXPR_ARG (exp
, 2);
4799 r_tramp
= expand_normal (t_tramp
);
4800 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4801 MEM_NOTRAP_P (m_tramp
) = 1;
4803 /* If ONSTACK, the TRAMP argument should be the address of a field
4804 within the local function's FRAME decl. Either way, let's see if
4805 we can fill in the MEM_ATTRs for this memory. */
4806 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4807 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4809 /* Creator of a heap trampoline is responsible for making sure the
4810 address is aligned to at least STACK_BOUNDARY. Normally malloc
4811 will ensure this anyhow. */
4812 tmp
= round_trampoline_addr (r_tramp
);
4815 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4816 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4817 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4820 /* The FUNC argument should be the address of the nested function.
4821 Extract the actual function decl to pass to the hook. */
4822 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4823 t_func
= TREE_OPERAND (t_func
, 0);
4824 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4826 r_chain
= expand_normal (t_chain
);
4828 /* Generate insns to initialize the trampoline. */
4829 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4833 trampolines_created
= 1;
4835 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4836 "trampoline generated for nested function %qD", t_func
);
4843 expand_builtin_adjust_trampoline (tree exp
)
4847 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4850 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4851 tramp
= round_trampoline_addr (tramp
);
4852 if (targetm
.calls
.trampoline_adjust_address
)
4853 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4858 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4859 function. The function first checks whether the back end provides
4860 an insn to implement signbit for the respective mode. If not, it
4861 checks whether the floating point format of the value is such that
4862 the sign bit can be extracted. If that is not the case, the
4863 function returns NULL_RTX to indicate that a normal call should be
4864 emitted rather than expanding the function in-line. EXP is the
4865 expression that is a call to the builtin function; if convenient,
4866 the result should be placed in TARGET. */
4868 expand_builtin_signbit (tree exp
, rtx target
)
4870 const struct real_format
*fmt
;
4871 enum machine_mode fmode
, imode
, rmode
;
4874 enum insn_code icode
;
4876 location_t loc
= EXPR_LOCATION (exp
);
4878 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4881 arg
= CALL_EXPR_ARG (exp
, 0);
4882 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4883 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4884 fmt
= REAL_MODE_FORMAT (fmode
);
4886 arg
= builtin_save_expr (arg
);
4888 /* Expand the argument yielding a RTX expression. */
4889 temp
= expand_normal (arg
);
4891 /* Check if the back end provides an insn that handles signbit for the
4893 icode
= optab_handler (signbit_optab
, fmode
);
4894 if (icode
!= CODE_FOR_nothing
)
4896 rtx last
= get_last_insn ();
4897 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4898 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4900 delete_insns_since (last
);
4903 /* For floating point formats without a sign bit, implement signbit
4905 bitpos
= fmt
->signbit_ro
;
4908 /* But we can't do this if the format supports signed zero. */
4909 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
4912 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4913 build_real (TREE_TYPE (arg
), dconst0
));
4914 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4917 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4919 imode
= int_mode_for_mode (fmode
);
4920 if (imode
== BLKmode
)
4922 temp
= gen_lowpart (imode
, temp
);
4927 /* Handle targets with different FP word orders. */
4928 if (FLOAT_WORDS_BIG_ENDIAN
)
4929 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4931 word
= bitpos
/ BITS_PER_WORD
;
4932 temp
= operand_subword_force (temp
, word
, fmode
);
4933 bitpos
= bitpos
% BITS_PER_WORD
;
4936 /* Force the intermediate word_mode (or narrower) result into a
4937 register. This avoids attempting to create paradoxical SUBREGs
4938 of floating point modes below. */
4939 temp
= force_reg (imode
, temp
);
4941 /* If the bitpos is within the "result mode" lowpart, the operation
4942 can be implement with a single bitwise AND. Otherwise, we need
4943 a right shift and an AND. */
4945 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4947 double_int mask
= double_int_zero
.set_bit (bitpos
);
4949 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
4950 temp
= gen_lowpart (rmode
, temp
);
4951 temp
= expand_binop (rmode
, and_optab
, temp
,
4952 immed_double_int_const (mask
, rmode
),
4953 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4957 /* Perform a logical right shift to place the signbit in the least
4958 significant bit, then truncate the result to the desired mode
4959 and mask just this bit. */
4960 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
4961 temp
= gen_lowpart (rmode
, temp
);
4962 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
4963 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4969 /* Expand fork or exec calls. TARGET is the desired target of the
4970 call. EXP is the call. FN is the
4971 identificator of the actual function. IGNORE is nonzero if the
4972 value is to be ignored. */
4975 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
4980 /* If we are not profiling, just call the function. */
4981 if (!profile_arc_flag
)
4984 /* Otherwise call the wrapper. This should be equivalent for the rest of
4985 compiler, so the code does not diverge, and the wrapper may run the
4986 code necessary for keeping the profiling sane. */
4988 switch (DECL_FUNCTION_CODE (fn
))
4991 id
= get_identifier ("__gcov_fork");
4994 case BUILT_IN_EXECL
:
4995 id
= get_identifier ("__gcov_execl");
4998 case BUILT_IN_EXECV
:
4999 id
= get_identifier ("__gcov_execv");
5002 case BUILT_IN_EXECLP
:
5003 id
= get_identifier ("__gcov_execlp");
5006 case BUILT_IN_EXECLE
:
5007 id
= get_identifier ("__gcov_execle");
5010 case BUILT_IN_EXECVP
:
5011 id
= get_identifier ("__gcov_execvp");
5014 case BUILT_IN_EXECVE
:
5015 id
= get_identifier ("__gcov_execve");
5022 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5023 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5024 DECL_EXTERNAL (decl
) = 1;
5025 TREE_PUBLIC (decl
) = 1;
5026 DECL_ARTIFICIAL (decl
) = 1;
5027 TREE_NOTHROW (decl
) = 1;
5028 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5029 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5030 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5031 return expand_call (call
, target
, ignore
);
5036 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5037 the pointer in these functions is void*, the tree optimizers may remove
5038 casts. The mode computed in expand_builtin isn't reliable either, due
5039 to __sync_bool_compare_and_swap.
5041 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5042 group of builtins. This gives us log2 of the mode size. */
5044 static inline enum machine_mode
5045 get_builtin_sync_mode (int fcode_diff
)
5047 /* The size is not negotiable, so ask not to get BLKmode in return
5048 if the target indicates that a smaller size would be better. */
5049 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5052 /* Expand the memory expression LOC and return the appropriate memory operand
5053 for the builtin_sync operations. */
5056 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5060 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5061 addr
= convert_memory_address (Pmode
, addr
);
5063 /* Note that we explicitly do not want any alias information for this
5064 memory, so that we kill all other live memories. Otherwise we don't
5065 satisfy the full barrier semantics of the intrinsic. */
5066 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5068 /* The alignment needs to be at least according to that of the mode. */
5069 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5070 get_pointer_alignment (loc
)));
5071 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5072 MEM_VOLATILE_P (mem
) = 1;
5077 /* Make sure an argument is in the right mode.
5078 EXP is the tree argument.
5079 MODE is the mode it should be in. */
5082 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
5085 enum machine_mode old_mode
;
5087 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5089 of CONST_INTs, where we know the old_mode only from the call argument. */
5091 old_mode
= GET_MODE (val
);
5092 if (old_mode
== VOIDmode
)
5093 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5094 val
= convert_modes (mode
, old_mode
, val
, 1);
5099 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5100 EXP is the CALL_EXPR. CODE is the rtx code
5101 that corresponds to the arithmetic or logical operation from the name;
5102 an exception here is that NOT actually means NAND. TARGET is an optional
5103 place for us to store the results; AFTER is true if this is the
5104 fetch_and_xxx form. */
5107 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5108 enum rtx_code code
, bool after
,
5112 location_t loc
= EXPR_LOCATION (exp
);
5114 if (code
== NOT
&& warn_sync_nand
)
5116 tree fndecl
= get_callee_fndecl (exp
);
5117 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5119 static bool warned_f_a_n
, warned_n_a_f
;
5123 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5124 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5125 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5126 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5127 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5131 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5132 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5133 warned_f_a_n
= true;
5136 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5137 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5138 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5139 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5140 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5144 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5145 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5146 warned_n_a_f
= true;
5154 /* Expand the operands. */
5155 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5156 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5158 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5162 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5163 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5164 true if this is the boolean form. TARGET is a place for us to store the
5165 results; this is NOT optional if IS_BOOL is true. */
5168 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5169 bool is_bool
, rtx target
)
5171 rtx old_val
, new_val
, mem
;
5174 /* Expand the operands. */
5175 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5176 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5177 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5179 pbool
= poval
= NULL
;
5180 if (target
!= const0_rtx
)
5187 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5188 false, MEMMODEL_SEQ_CST
,
5195 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5196 general form is actually an atomic exchange, and some targets only
5197 support a reduced form with the second argument being a constant 1.
5198 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5202 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5207 /* Expand the operands. */
5208 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5209 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5211 return expand_sync_lock_test_and_set (target
, mem
, val
);
5214 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5217 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5221 /* Expand the operands. */
5222 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5224 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5227 /* Given an integer representing an ``enum memmodel'', verify its
5228 correctness and return the memory model enum. */
5230 static enum memmodel
5231 get_memmodel (tree exp
)
5234 unsigned HOST_WIDE_INT val
;
5236 /* If the parameter is not a constant, it's a run time value so we'll just
5237 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5238 if (TREE_CODE (exp
) != INTEGER_CST
)
5239 return MEMMODEL_SEQ_CST
;
5241 op
= expand_normal (exp
);
5244 if (targetm
.memmodel_check
)
5245 val
= targetm
.memmodel_check (val
);
5246 else if (val
& ~MEMMODEL_MASK
)
5248 warning (OPT_Winvalid_memory_model
,
5249 "Unknown architecture specifier in memory model to builtin.");
5250 return MEMMODEL_SEQ_CST
;
5253 if ((INTVAL (op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5255 warning (OPT_Winvalid_memory_model
,
5256 "invalid memory model argument to builtin");
5257 return MEMMODEL_SEQ_CST
;
5260 return (enum memmodel
) val
;
5263 /* Expand the __atomic_exchange intrinsic:
5264 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5269 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5272 enum memmodel model
;
5274 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5275 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5277 error ("invalid memory model for %<__atomic_exchange%>");
5281 if (!flag_inline_atomics
)
5284 /* Expand the operands. */
5285 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5286 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5288 return expand_atomic_exchange (target
, mem
, val
, model
);
5291 /* Expand the __atomic_compare_exchange intrinsic:
5292 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5293 TYPE desired, BOOL weak,
5294 enum memmodel success,
5295 enum memmodel failure)
5296 EXP is the CALL_EXPR.
5297 TARGET is an optional place for us to store the results. */
5300 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5303 rtx expect
, desired
, mem
, oldval
, label
;
5304 enum memmodel success
, failure
;
5308 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5309 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5311 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5312 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5314 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5318 if (failure
> success
)
5320 error ("failure memory model cannot be stronger than success "
5321 "memory model for %<__atomic_compare_exchange%>");
5325 if (!flag_inline_atomics
)
5328 /* Expand the operands. */
5329 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5331 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5332 expect
= convert_memory_address (Pmode
, expect
);
5333 expect
= gen_rtx_MEM (mode
, expect
);
5334 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5336 weak
= CALL_EXPR_ARG (exp
, 3);
5338 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5341 if (target
== const0_rtx
)
5344 /* Lest the rtl backend create a race condition with an imporoper store
5345 to memory, always create a new pseudo for OLDVAL. */
5348 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5349 is_weak
, success
, failure
))
5352 /* Conditionally store back to EXPECT, lest we create a race condition
5353 with an improper store to memory. */
5354 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5355 the normal case where EXPECT is totally private, i.e. a register. At
5356 which point the store can be unconditional. */
5357 label
= gen_label_rtx ();
5358 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
, VOIDmode
, 1, label
);
5359 emit_move_insn (expect
, oldval
);
5365 /* Expand the __atomic_load intrinsic:
5366 TYPE __atomic_load (TYPE *object, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5371 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5374 enum memmodel model
;
5376 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5377 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5378 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5380 error ("invalid memory model for %<__atomic_load%>");
5384 if (!flag_inline_atomics
)
5387 /* Expand the operand. */
5388 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5390 return expand_atomic_load (target
, mem
, model
);
5394 /* Expand the __atomic_store intrinsic:
5395 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results. */
5400 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5403 enum memmodel model
;
5405 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5406 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5407 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5408 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5410 error ("invalid memory model for %<__atomic_store%>");
5414 if (!flag_inline_atomics
)
5417 /* Expand the operands. */
5418 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5419 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5421 return expand_atomic_store (mem
, val
, model
, false);
5424 /* Expand the __atomic_fetch_XXX intrinsic:
5425 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5426 EXP is the CALL_EXPR.
5427 TARGET is an optional place for us to store the results.
5428 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5429 FETCH_AFTER is true if returning the result of the operation.
5430 FETCH_AFTER is false if returning the value before the operation.
5431 IGNORE is true if the result is not used.
5432 EXT_CALL is the correct builtin for an external call if this cannot be
5433 resolved to an instruction sequence. */
5436 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5437 enum rtx_code code
, bool fetch_after
,
5438 bool ignore
, enum built_in_function ext_call
)
5441 enum memmodel model
;
5445 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5447 /* Expand the operands. */
5448 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5449 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5451 /* Only try generating instructions if inlining is turned on. */
5452 if (flag_inline_atomics
)
5454 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5459 /* Return if a different routine isn't needed for the library call. */
5460 if (ext_call
== BUILT_IN_NONE
)
5463 /* Change the call to the specified function. */
5464 fndecl
= get_callee_fndecl (exp
);
5465 addr
= CALL_EXPR_FN (exp
);
5468 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5469 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5471 /* Expand the call here so we can emit trailing code. */
5472 ret
= expand_call (exp
, target
, ignore
);
5474 /* Replace the original function just in case it matters. */
5475 TREE_OPERAND (addr
, 0) = fndecl
;
5477 /* Then issue the arithmetic correction to return the right result. */
5482 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5484 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5487 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5494 #ifndef HAVE_atomic_clear
5495 # define HAVE_atomic_clear 0
5496 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5499 /* Expand an atomic clear operation.
5500 void _atomic_clear (BOOL *obj, enum memmodel)
5501 EXP is the call expression. */
5504 expand_builtin_atomic_clear (tree exp
)
5506 enum machine_mode mode
;
5508 enum memmodel model
;
5510 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5511 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5512 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5514 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5515 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5517 error ("invalid memory model for %<__atomic_store%>");
5521 if (HAVE_atomic_clear
)
5523 emit_insn (gen_atomic_clear (mem
, model
));
5527 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5528 Failing that, a store is issued by __atomic_store. The only way this can
5529 fail is if the bool type is larger than a word size. Unlikely, but
5530 handle it anyway for completeness. Assume a single threaded model since
5531 there is no atomic support in this case, and no barriers are required. */
5532 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5534 emit_move_insn (mem
, const0_rtx
);
5538 /* Expand an atomic test_and_set operation.
5539 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5540 EXP is the call expression. */
5543 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5546 enum memmodel model
;
5547 enum machine_mode mode
;
5549 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5550 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5551 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5553 return expand_atomic_test_and_set (target
, mem
, model
);
5557 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5558 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5561 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5564 enum machine_mode mode
;
5565 unsigned int mode_align
, type_align
;
5567 if (TREE_CODE (arg0
) != INTEGER_CST
)
5570 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5571 mode
= mode_for_size (size
, MODE_INT
, 0);
5572 mode_align
= GET_MODE_ALIGNMENT (mode
);
5574 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5575 type_align
= mode_align
;
5578 tree ttype
= TREE_TYPE (arg1
);
5580 /* This function is usually invoked and folded immediately by the front
5581 end before anything else has a chance to look at it. The pointer
5582 parameter at this point is usually cast to a void *, so check for that
5583 and look past the cast. */
5584 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5585 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5586 arg1
= TREE_OPERAND (arg1
, 0);
5588 ttype
= TREE_TYPE (arg1
);
5589 gcc_assert (POINTER_TYPE_P (ttype
));
5591 /* Get the underlying type of the object. */
5592 ttype
= TREE_TYPE (ttype
);
5593 type_align
= TYPE_ALIGN (ttype
);
5596 /* If the object has smaller alignment, the the lock free routines cannot
5598 if (type_align
< mode_align
)
5599 return boolean_false_node
;
5601 /* Check if a compare_and_swap pattern exists for the mode which represents
5602 the required size. The pattern is not allowed to fail, so the existence
5603 of the pattern indicates support is present. */
5604 if (can_compare_and_swap_p (mode
, true))
5605 return boolean_true_node
;
5607 return boolean_false_node
;
5610 /* Return true if the parameters to call EXP represent an object which will
5611 always generate lock free instructions. The first argument represents the
5612 size of the object, and the second parameter is a pointer to the object
5613 itself. If NULL is passed for the object, then the result is based on
5614 typical alignment for an object of the specified size. Otherwise return
5618 expand_builtin_atomic_always_lock_free (tree exp
)
5621 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5622 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5624 if (TREE_CODE (arg0
) != INTEGER_CST
)
5626 error ("non-constant argument 1 to __atomic_always_lock_free");
5630 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5631 if (size
== boolean_true_node
)
5636 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5637 is lock free on this architecture. */
5640 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5642 if (!flag_inline_atomics
)
5645 /* If it isn't always lock free, don't generate a result. */
5646 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5647 return boolean_true_node
;
5652 /* Return true if the parameters to call EXP represent an object which will
5653 always generate lock free instructions. The first argument represents the
5654 size of the object, and the second parameter is a pointer to the object
5655 itself. If NULL is passed for the object, then the result is based on
5656 typical alignment for an object of the specified size. Otherwise return
5660 expand_builtin_atomic_is_lock_free (tree exp
)
5663 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5664 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5666 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5668 error ("non-integer argument 1 to __atomic_is_lock_free");
5672 if (!flag_inline_atomics
)
5675 /* If the value is known at compile time, return the RTX for it. */
5676 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5677 if (size
== boolean_true_node
)
5683 /* Expand the __atomic_thread_fence intrinsic:
5684 void __atomic_thread_fence (enum memmodel)
5685 EXP is the CALL_EXPR. */
5688 expand_builtin_atomic_thread_fence (tree exp
)
5690 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5691 expand_mem_thread_fence (model
);
5694 /* Expand the __atomic_signal_fence intrinsic:
5695 void __atomic_signal_fence (enum memmodel)
5696 EXP is the CALL_EXPR. */
5699 expand_builtin_atomic_signal_fence (tree exp
)
5701 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5702 expand_mem_signal_fence (model
);
5705 /* Expand the __sync_synchronize intrinsic. */
5708 expand_builtin_sync_synchronize (void)
5710 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5714 expand_builtin_thread_pointer (tree exp
, rtx target
)
5716 enum insn_code icode
;
5717 if (!validate_arglist (exp
, VOID_TYPE
))
5719 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5720 if (icode
!= CODE_FOR_nothing
)
5722 struct expand_operand op
;
5723 /* If the target is not sutitable then create a new target. */
5724 if (target
== NULL_RTX
5726 || GET_MODE (target
) != Pmode
)
5727 target
= gen_reg_rtx (Pmode
);
5728 create_output_operand (&op
, target
, Pmode
);
5729 expand_insn (icode
, 1, &op
);
5732 error ("__builtin_thread_pointer is not supported on this target");
5737 expand_builtin_set_thread_pointer (tree exp
)
5739 enum insn_code icode
;
5740 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5742 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5743 if (icode
!= CODE_FOR_nothing
)
5745 struct expand_operand op
;
5746 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5747 Pmode
, EXPAND_NORMAL
);
5748 create_input_operand (&op
, val
, Pmode
);
5749 expand_insn (icode
, 1, &op
);
5752 error ("__builtin_set_thread_pointer is not supported on this target");
5756 /* Emit code to restore the current value of stack. */
5759 expand_stack_restore (tree var
)
5761 rtx prev
, sa
= expand_normal (var
);
5763 sa
= convert_memory_address (Pmode
, sa
);
5765 prev
= get_last_insn ();
5766 emit_stack_restore (SAVE_BLOCK
, sa
);
5767 fixup_args_size_notes (prev
, get_last_insn (), 0);
5771 /* Emit code to save the current value of stack. */
5774 expand_stack_save (void)
5778 do_pending_stack_adjust ();
5779 emit_stack_save (SAVE_BLOCK
, &ret
);
5783 /* Expand an expression EXP that calls a built-in function,
5784 with result going to TARGET if that's convenient
5785 (and in mode MODE if that's convenient).
5786 SUBTARGET may be used as the target for computing one of EXP's operands.
5787 IGNORE is nonzero if the value is to be ignored. */
5790 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5793 tree fndecl
= get_callee_fndecl (exp
);
5794 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5795 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5798 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5799 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5801 /* When not optimizing, generate calls to library functions for a certain
5804 && !called_as_built_in (fndecl
)
5805 && fcode
!= BUILT_IN_FORK
5806 && fcode
!= BUILT_IN_EXECL
5807 && fcode
!= BUILT_IN_EXECV
5808 && fcode
!= BUILT_IN_EXECLP
5809 && fcode
!= BUILT_IN_EXECLE
5810 && fcode
!= BUILT_IN_EXECVP
5811 && fcode
!= BUILT_IN_EXECVE
5812 && fcode
!= BUILT_IN_ALLOCA
5813 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5814 && fcode
!= BUILT_IN_FREE
)
5815 return expand_call (exp
, target
, ignore
);
5817 /* The built-in function expanders test for target == const0_rtx
5818 to determine whether the function's result will be ignored. */
5820 target
= const0_rtx
;
5822 /* If the result of a pure or const built-in function is ignored, and
5823 none of its arguments are volatile, we can avoid expanding the
5824 built-in call and just evaluate the arguments for side-effects. */
5825 if (target
== const0_rtx
5826 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5827 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5829 bool volatilep
= false;
5831 call_expr_arg_iterator iter
;
5833 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5834 if (TREE_THIS_VOLATILE (arg
))
5842 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5843 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5850 CASE_FLT_FN (BUILT_IN_FABS
):
5851 case BUILT_IN_FABSD32
:
5852 case BUILT_IN_FABSD64
:
5853 case BUILT_IN_FABSD128
:
5854 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5859 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5860 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5865 /* Just do a normal library call if we were unable to fold
5867 CASE_FLT_FN (BUILT_IN_CABS
):
5870 CASE_FLT_FN (BUILT_IN_EXP
):
5871 CASE_FLT_FN (BUILT_IN_EXP10
):
5872 CASE_FLT_FN (BUILT_IN_POW10
):
5873 CASE_FLT_FN (BUILT_IN_EXP2
):
5874 CASE_FLT_FN (BUILT_IN_EXPM1
):
5875 CASE_FLT_FN (BUILT_IN_LOGB
):
5876 CASE_FLT_FN (BUILT_IN_LOG
):
5877 CASE_FLT_FN (BUILT_IN_LOG10
):
5878 CASE_FLT_FN (BUILT_IN_LOG2
):
5879 CASE_FLT_FN (BUILT_IN_LOG1P
):
5880 CASE_FLT_FN (BUILT_IN_TAN
):
5881 CASE_FLT_FN (BUILT_IN_ASIN
):
5882 CASE_FLT_FN (BUILT_IN_ACOS
):
5883 CASE_FLT_FN (BUILT_IN_ATAN
):
5884 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5885 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5886 because of possible accuracy problems. */
5887 if (! flag_unsafe_math_optimizations
)
5889 CASE_FLT_FN (BUILT_IN_SQRT
):
5890 CASE_FLT_FN (BUILT_IN_FLOOR
):
5891 CASE_FLT_FN (BUILT_IN_CEIL
):
5892 CASE_FLT_FN (BUILT_IN_TRUNC
):
5893 CASE_FLT_FN (BUILT_IN_ROUND
):
5894 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5895 CASE_FLT_FN (BUILT_IN_RINT
):
5896 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5901 CASE_FLT_FN (BUILT_IN_FMA
):
5902 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5907 CASE_FLT_FN (BUILT_IN_ILOGB
):
5908 if (! flag_unsafe_math_optimizations
)
5910 CASE_FLT_FN (BUILT_IN_ISINF
):
5911 CASE_FLT_FN (BUILT_IN_FINITE
):
5912 case BUILT_IN_ISFINITE
:
5913 case BUILT_IN_ISNORMAL
:
5914 target
= expand_builtin_interclass_mathfn (exp
, target
);
5919 CASE_FLT_FN (BUILT_IN_ICEIL
):
5920 CASE_FLT_FN (BUILT_IN_LCEIL
):
5921 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5922 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5923 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5924 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5925 target
= expand_builtin_int_roundingfn (exp
, target
);
5930 CASE_FLT_FN (BUILT_IN_IRINT
):
5931 CASE_FLT_FN (BUILT_IN_LRINT
):
5932 CASE_FLT_FN (BUILT_IN_LLRINT
):
5933 CASE_FLT_FN (BUILT_IN_IROUND
):
5934 CASE_FLT_FN (BUILT_IN_LROUND
):
5935 CASE_FLT_FN (BUILT_IN_LLROUND
):
5936 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5941 CASE_FLT_FN (BUILT_IN_POWI
):
5942 target
= expand_builtin_powi (exp
, target
);
5947 CASE_FLT_FN (BUILT_IN_ATAN2
):
5948 CASE_FLT_FN (BUILT_IN_LDEXP
):
5949 CASE_FLT_FN (BUILT_IN_SCALB
):
5950 CASE_FLT_FN (BUILT_IN_SCALBN
):
5951 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5952 if (! flag_unsafe_math_optimizations
)
5955 CASE_FLT_FN (BUILT_IN_FMOD
):
5956 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5957 CASE_FLT_FN (BUILT_IN_DREM
):
5958 CASE_FLT_FN (BUILT_IN_POW
):
5959 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5964 CASE_FLT_FN (BUILT_IN_CEXPI
):
5965 target
= expand_builtin_cexpi (exp
, target
);
5966 gcc_assert (target
);
5969 CASE_FLT_FN (BUILT_IN_SIN
):
5970 CASE_FLT_FN (BUILT_IN_COS
):
5971 if (! flag_unsafe_math_optimizations
)
5973 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5978 CASE_FLT_FN (BUILT_IN_SINCOS
):
5979 if (! flag_unsafe_math_optimizations
)
5981 target
= expand_builtin_sincos (exp
);
5986 case BUILT_IN_APPLY_ARGS
:
5987 return expand_builtin_apply_args ();
5989 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5990 FUNCTION with a copy of the parameters described by
5991 ARGUMENTS, and ARGSIZE. It returns a block of memory
5992 allocated on the stack into which is stored all the registers
5993 that might possibly be used for returning the result of a
5994 function. ARGUMENTS is the value returned by
5995 __builtin_apply_args. ARGSIZE is the number of bytes of
5996 arguments that must be copied. ??? How should this value be
5997 computed? We'll also need a safe worst case value for varargs
5999 case BUILT_IN_APPLY
:
6000 if (!validate_arglist (exp
, POINTER_TYPE
,
6001 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6002 && !validate_arglist (exp
, REFERENCE_TYPE
,
6003 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6009 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6010 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6011 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6013 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6016 /* __builtin_return (RESULT) causes the function to return the
6017 value described by RESULT. RESULT is address of the block of
6018 memory returned by __builtin_apply. */
6019 case BUILT_IN_RETURN
:
6020 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6021 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6024 case BUILT_IN_SAVEREGS
:
6025 return expand_builtin_saveregs ();
6027 case BUILT_IN_VA_ARG_PACK
:
6028 /* All valid uses of __builtin_va_arg_pack () are removed during
6030 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6033 case BUILT_IN_VA_ARG_PACK_LEN
:
6034 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6036 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6039 /* Return the address of the first anonymous stack arg. */
6040 case BUILT_IN_NEXT_ARG
:
6041 if (fold_builtin_next_arg (exp
, false))
6043 return expand_builtin_next_arg ();
6045 case BUILT_IN_CLEAR_CACHE
:
6046 target
= expand_builtin___clear_cache (exp
);
6051 case BUILT_IN_CLASSIFY_TYPE
:
6052 return expand_builtin_classify_type (exp
);
6054 case BUILT_IN_CONSTANT_P
:
6057 case BUILT_IN_FRAME_ADDRESS
:
6058 case BUILT_IN_RETURN_ADDRESS
:
6059 return expand_builtin_frame_address (fndecl
, exp
);
6061 /* Returns the address of the area where the structure is returned.
6063 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6064 if (call_expr_nargs (exp
) != 0
6065 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6066 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6069 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6071 case BUILT_IN_ALLOCA
:
6072 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6073 /* If the allocation stems from the declaration of a variable-sized
6074 object, it cannot accumulate. */
6075 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6080 case BUILT_IN_STACK_SAVE
:
6081 return expand_stack_save ();
6083 case BUILT_IN_STACK_RESTORE
:
6084 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6087 case BUILT_IN_BSWAP16
:
6088 case BUILT_IN_BSWAP32
:
6089 case BUILT_IN_BSWAP64
:
6090 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6095 CASE_INT_FN (BUILT_IN_FFS
):
6096 target
= expand_builtin_unop (target_mode
, exp
, target
,
6097 subtarget
, ffs_optab
);
6102 CASE_INT_FN (BUILT_IN_CLZ
):
6103 target
= expand_builtin_unop (target_mode
, exp
, target
,
6104 subtarget
, clz_optab
);
6109 CASE_INT_FN (BUILT_IN_CTZ
):
6110 target
= expand_builtin_unop (target_mode
, exp
, target
,
6111 subtarget
, ctz_optab
);
6116 CASE_INT_FN (BUILT_IN_CLRSB
):
6117 target
= expand_builtin_unop (target_mode
, exp
, target
,
6118 subtarget
, clrsb_optab
);
6123 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6124 target
= expand_builtin_unop (target_mode
, exp
, target
,
6125 subtarget
, popcount_optab
);
6130 CASE_INT_FN (BUILT_IN_PARITY
):
6131 target
= expand_builtin_unop (target_mode
, exp
, target
,
6132 subtarget
, parity_optab
);
6137 case BUILT_IN_STRLEN
:
6138 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6143 case BUILT_IN_STRCPY
:
6144 target
= expand_builtin_strcpy (exp
, target
);
6149 case BUILT_IN_STRNCPY
:
6150 target
= expand_builtin_strncpy (exp
, target
);
6155 case BUILT_IN_STPCPY
:
6156 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6161 case BUILT_IN_MEMCPY
:
6162 target
= expand_builtin_memcpy (exp
, target
);
6167 case BUILT_IN_MEMPCPY
:
6168 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6173 case BUILT_IN_MEMSET
:
6174 target
= expand_builtin_memset (exp
, target
, mode
);
6179 case BUILT_IN_BZERO
:
6180 target
= expand_builtin_bzero (exp
);
6185 case BUILT_IN_STRCMP
:
6186 target
= expand_builtin_strcmp (exp
, target
);
6191 case BUILT_IN_STRNCMP
:
6192 target
= expand_builtin_strncmp (exp
, target
, mode
);
6198 case BUILT_IN_MEMCMP
:
6199 target
= expand_builtin_memcmp (exp
, target
, mode
);
6204 case BUILT_IN_SETJMP
:
6205 /* This should have been lowered to the builtins below. */
6208 case BUILT_IN_SETJMP_SETUP
:
6209 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6210 and the receiver label. */
6211 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6213 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6214 VOIDmode
, EXPAND_NORMAL
);
6215 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6216 rtx label_r
= label_rtx (label
);
6218 /* This is copied from the handling of non-local gotos. */
6219 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6220 nonlocal_goto_handler_labels
6221 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6222 nonlocal_goto_handler_labels
);
6223 /* ??? Do not let expand_label treat us as such since we would
6224 not want to be both on the list of non-local labels and on
6225 the list of forced labels. */
6226 FORCED_LABEL (label
) = 0;
6231 case BUILT_IN_SETJMP_RECEIVER
:
6232 /* __builtin_setjmp_receiver is passed the receiver label. */
6233 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6235 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6236 rtx label_r
= label_rtx (label
);
6238 expand_builtin_setjmp_receiver (label_r
);
6243 /* __builtin_longjmp is passed a pointer to an array of five words.
6244 It's similar to the C library longjmp function but works with
6245 __builtin_setjmp above. */
6246 case BUILT_IN_LONGJMP
:
6247 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6249 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6250 VOIDmode
, EXPAND_NORMAL
);
6251 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6253 if (value
!= const1_rtx
)
6255 error ("%<__builtin_longjmp%> second argument must be 1");
6259 expand_builtin_longjmp (buf_addr
, value
);
6264 case BUILT_IN_NONLOCAL_GOTO
:
6265 target
= expand_builtin_nonlocal_goto (exp
);
6270 /* This updates the setjmp buffer that is its argument with the value
6271 of the current stack pointer. */
6272 case BUILT_IN_UPDATE_SETJMP_BUF
:
6273 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6276 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6278 expand_builtin_update_setjmp_buf (buf_addr
);
6284 expand_builtin_trap ();
6287 case BUILT_IN_UNREACHABLE
:
6288 expand_builtin_unreachable ();
6291 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6292 case BUILT_IN_SIGNBITD32
:
6293 case BUILT_IN_SIGNBITD64
:
6294 case BUILT_IN_SIGNBITD128
:
6295 target
= expand_builtin_signbit (exp
, target
);
6300 /* Various hooks for the DWARF 2 __throw routine. */
6301 case BUILT_IN_UNWIND_INIT
:
6302 expand_builtin_unwind_init ();
6304 case BUILT_IN_DWARF_CFA
:
6305 return virtual_cfa_rtx
;
6306 #ifdef DWARF2_UNWIND_INFO
6307 case BUILT_IN_DWARF_SP_COLUMN
:
6308 return expand_builtin_dwarf_sp_column ();
6309 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6310 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6313 case BUILT_IN_FROB_RETURN_ADDR
:
6314 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6315 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6316 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6317 case BUILT_IN_EH_RETURN
:
6318 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6319 CALL_EXPR_ARG (exp
, 1));
6321 #ifdef EH_RETURN_DATA_REGNO
6322 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6323 return expand_builtin_eh_return_data_regno (exp
);
6325 case BUILT_IN_EXTEND_POINTER
:
6326 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6327 case BUILT_IN_EH_POINTER
:
6328 return expand_builtin_eh_pointer (exp
);
6329 case BUILT_IN_EH_FILTER
:
6330 return expand_builtin_eh_filter (exp
);
6331 case BUILT_IN_EH_COPY_VALUES
:
6332 return expand_builtin_eh_copy_values (exp
);
6334 case BUILT_IN_VA_START
:
6335 return expand_builtin_va_start (exp
);
6336 case BUILT_IN_VA_END
:
6337 return expand_builtin_va_end (exp
);
6338 case BUILT_IN_VA_COPY
:
6339 return expand_builtin_va_copy (exp
);
6340 case BUILT_IN_EXPECT
:
6341 return expand_builtin_expect (exp
, target
);
6342 case BUILT_IN_ASSUME_ALIGNED
:
6343 return expand_builtin_assume_aligned (exp
, target
);
6344 case BUILT_IN_PREFETCH
:
6345 expand_builtin_prefetch (exp
);
6348 case BUILT_IN_INIT_TRAMPOLINE
:
6349 return expand_builtin_init_trampoline (exp
, true);
6350 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6351 return expand_builtin_init_trampoline (exp
, false);
6352 case BUILT_IN_ADJUST_TRAMPOLINE
:
6353 return expand_builtin_adjust_trampoline (exp
);
6356 case BUILT_IN_EXECL
:
6357 case BUILT_IN_EXECV
:
6358 case BUILT_IN_EXECLP
:
6359 case BUILT_IN_EXECLE
:
6360 case BUILT_IN_EXECVP
:
6361 case BUILT_IN_EXECVE
:
6362 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6367 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6368 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6369 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6370 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6371 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6372 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6373 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6378 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6379 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6380 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6381 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6382 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6383 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6384 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6389 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6390 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6391 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6392 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6393 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6394 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6395 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6400 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6401 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6402 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6403 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6404 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6405 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6406 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6411 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6412 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6413 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6414 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6415 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6416 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6417 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6422 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6423 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6424 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6425 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6426 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6427 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6428 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6433 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6434 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6435 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6436 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6437 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6438 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6439 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6444 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6445 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6446 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6447 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6448 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6449 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6450 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6455 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6456 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6457 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6458 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6459 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6460 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6461 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6466 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6467 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6468 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6469 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6470 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6471 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6472 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6477 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6478 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6479 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6480 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6481 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6482 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6483 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6490 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6493 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6494 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6501 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6503 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6504 if (mode
== VOIDmode
)
6505 mode
= TYPE_MODE (boolean_type_node
);
6506 if (!target
|| !register_operand (target
, mode
))
6507 target
= gen_reg_rtx (mode
);
6509 mode
= get_builtin_sync_mode
6510 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6511 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6518 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6520 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6521 mode
= get_builtin_sync_mode
6522 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6523 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6530 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6532 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6533 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6534 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6539 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6540 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6541 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6542 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6543 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6544 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6545 expand_builtin_sync_lock_release (mode
, exp
);
6548 case BUILT_IN_SYNC_SYNCHRONIZE
:
6549 expand_builtin_sync_synchronize ();
6552 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6553 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6554 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6555 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6556 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6557 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6558 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6567 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6569 unsigned int nargs
, z
;
6570 vec
<tree
, va_gc
> *vec
;
6573 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6574 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6578 /* If this is turned into an external library call, the weak parameter
6579 must be dropped to match the expected parameter list. */
6580 nargs
= call_expr_nargs (exp
);
6581 vec_alloc (vec
, nargs
- 1);
6582 for (z
= 0; z
< 3; z
++)
6583 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6584 /* Skip the boolean weak parameter. */
6585 for (z
= 4; z
< 6; z
++)
6586 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6587 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6591 case BUILT_IN_ATOMIC_LOAD_1
:
6592 case BUILT_IN_ATOMIC_LOAD_2
:
6593 case BUILT_IN_ATOMIC_LOAD_4
:
6594 case BUILT_IN_ATOMIC_LOAD_8
:
6595 case BUILT_IN_ATOMIC_LOAD_16
:
6596 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6597 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6602 case BUILT_IN_ATOMIC_STORE_1
:
6603 case BUILT_IN_ATOMIC_STORE_2
:
6604 case BUILT_IN_ATOMIC_STORE_4
:
6605 case BUILT_IN_ATOMIC_STORE_8
:
6606 case BUILT_IN_ATOMIC_STORE_16
:
6607 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6608 target
= expand_builtin_atomic_store (mode
, exp
);
6613 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6614 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6615 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6616 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6617 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6619 enum built_in_function lib
;
6620 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6621 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6622 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6623 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6629 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6630 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6631 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6632 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6633 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6635 enum built_in_function lib
;
6636 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6637 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6638 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6639 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6645 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6646 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6647 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6648 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6649 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6651 enum built_in_function lib
;
6652 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6653 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6654 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6655 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6661 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6662 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6663 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6664 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6665 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6667 enum built_in_function lib
;
6668 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6669 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6670 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6671 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6677 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6678 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6679 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6680 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6681 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6683 enum built_in_function lib
;
6684 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6685 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6686 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6687 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6693 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6694 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6695 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6696 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6697 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6699 enum built_in_function lib
;
6700 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6701 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6702 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6703 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6709 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6710 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6711 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6712 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6713 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6714 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6715 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6716 ignore
, BUILT_IN_NONE
);
6721 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6722 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6723 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6724 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6725 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6726 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6727 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6728 ignore
, BUILT_IN_NONE
);
6733 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6734 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6735 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6736 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6737 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6738 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6739 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6740 ignore
, BUILT_IN_NONE
);
6745 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6746 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6747 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6748 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6749 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6750 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6751 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6752 ignore
, BUILT_IN_NONE
);
6757 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6758 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6759 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6760 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6761 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6762 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6763 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6764 ignore
, BUILT_IN_NONE
);
6769 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6770 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6771 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6772 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6773 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6774 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6775 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6776 ignore
, BUILT_IN_NONE
);
6781 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6782 return expand_builtin_atomic_test_and_set (exp
, target
);
6784 case BUILT_IN_ATOMIC_CLEAR
:
6785 return expand_builtin_atomic_clear (exp
);
6787 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6788 return expand_builtin_atomic_always_lock_free (exp
);
6790 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6791 target
= expand_builtin_atomic_is_lock_free (exp
);
6796 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6797 expand_builtin_atomic_thread_fence (exp
);
6800 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6801 expand_builtin_atomic_signal_fence (exp
);
6804 case BUILT_IN_OBJECT_SIZE
:
6805 return expand_builtin_object_size (exp
);
6807 case BUILT_IN_MEMCPY_CHK
:
6808 case BUILT_IN_MEMPCPY_CHK
:
6809 case BUILT_IN_MEMMOVE_CHK
:
6810 case BUILT_IN_MEMSET_CHK
:
6811 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6816 case BUILT_IN_STRCPY_CHK
:
6817 case BUILT_IN_STPCPY_CHK
:
6818 case BUILT_IN_STRNCPY_CHK
:
6819 case BUILT_IN_STPNCPY_CHK
:
6820 case BUILT_IN_STRCAT_CHK
:
6821 case BUILT_IN_STRNCAT_CHK
:
6822 case BUILT_IN_SNPRINTF_CHK
:
6823 case BUILT_IN_VSNPRINTF_CHK
:
6824 maybe_emit_chk_warning (exp
, fcode
);
6827 case BUILT_IN_SPRINTF_CHK
:
6828 case BUILT_IN_VSPRINTF_CHK
:
6829 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6833 if (warn_free_nonheap_object
)
6834 maybe_emit_free_warning (exp
);
6837 case BUILT_IN_THREAD_POINTER
:
6838 return expand_builtin_thread_pointer (exp
, target
);
6840 case BUILT_IN_SET_THREAD_POINTER
:
6841 expand_builtin_set_thread_pointer (exp
);
6844 case BUILT_IN_CILK_DETACH
:
6845 expand_builtin_cilk_detach (exp
);
6848 case BUILT_IN_CILK_POP_FRAME
:
6849 expand_builtin_cilk_pop_frame (exp
);
6852 default: /* just do library call, if unknown builtin */
6856 /* The switch statement above can drop through to cause the function
6857 to be called normally. */
6858 return expand_call (exp
, target
, ignore
);
6861 /* Determine whether a tree node represents a call to a built-in
6862 function. If the tree T is a call to a built-in function with
6863 the right number of arguments of the appropriate types, return
6864 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6865 Otherwise the return value is END_BUILTINS. */
6867 enum built_in_function
6868 builtin_mathfn_code (const_tree t
)
6870 const_tree fndecl
, arg
, parmlist
;
6871 const_tree argtype
, parmtype
;
6872 const_call_expr_arg_iterator iter
;
6874 if (TREE_CODE (t
) != CALL_EXPR
6875 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6876 return END_BUILTINS
;
6878 fndecl
= get_callee_fndecl (t
);
6879 if (fndecl
== NULL_TREE
6880 || TREE_CODE (fndecl
) != FUNCTION_DECL
6881 || ! DECL_BUILT_IN (fndecl
)
6882 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6883 return END_BUILTINS
;
6885 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6886 init_const_call_expr_arg_iterator (t
, &iter
);
6887 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6889 /* If a function doesn't take a variable number of arguments,
6890 the last element in the list will have type `void'. */
6891 parmtype
= TREE_VALUE (parmlist
);
6892 if (VOID_TYPE_P (parmtype
))
6894 if (more_const_call_expr_args_p (&iter
))
6895 return END_BUILTINS
;
6896 return DECL_FUNCTION_CODE (fndecl
);
6899 if (! more_const_call_expr_args_p (&iter
))
6900 return END_BUILTINS
;
6902 arg
= next_const_call_expr_arg (&iter
);
6903 argtype
= TREE_TYPE (arg
);
6905 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6907 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6908 return END_BUILTINS
;
6910 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6912 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6913 return END_BUILTINS
;
6915 else if (POINTER_TYPE_P (parmtype
))
6917 if (! POINTER_TYPE_P (argtype
))
6918 return END_BUILTINS
;
6920 else if (INTEGRAL_TYPE_P (parmtype
))
6922 if (! INTEGRAL_TYPE_P (argtype
))
6923 return END_BUILTINS
;
6926 return END_BUILTINS
;
6929 /* Variable-length argument list. */
6930 return DECL_FUNCTION_CODE (fndecl
);
6933 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6934 evaluate to a constant. */
6937 fold_builtin_constant_p (tree arg
)
6939 /* We return 1 for a numeric type that's known to be a constant
6940 value at compile-time or for an aggregate type that's a
6941 literal constant. */
6944 /* If we know this is a constant, emit the constant of one. */
6945 if (CONSTANT_CLASS_P (arg
)
6946 || (TREE_CODE (arg
) == CONSTRUCTOR
6947 && TREE_CONSTANT (arg
)))
6948 return integer_one_node
;
6949 if (TREE_CODE (arg
) == ADDR_EXPR
)
6951 tree op
= TREE_OPERAND (arg
, 0);
6952 if (TREE_CODE (op
) == STRING_CST
6953 || (TREE_CODE (op
) == ARRAY_REF
6954 && integer_zerop (TREE_OPERAND (op
, 1))
6955 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6956 return integer_one_node
;
6959 /* If this expression has side effects, show we don't know it to be a
6960 constant. Likewise if it's a pointer or aggregate type since in
6961 those case we only want literals, since those are only optimized
6962 when generating RTL, not later.
6963 And finally, if we are compiling an initializer, not code, we
6964 need to return a definite result now; there's not going to be any
6965 more optimization done. */
6966 if (TREE_SIDE_EFFECTS (arg
)
6967 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6968 || POINTER_TYPE_P (TREE_TYPE (arg
))
6970 || folding_initializer
6971 || force_folding_builtin_constant_p
)
6972 return integer_zero_node
;
6977 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6978 return it as a truthvalue. */
6981 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
6983 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
6985 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
6986 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
6987 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
6988 pred_type
= TREE_VALUE (arg_types
);
6989 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
6991 pred
= fold_convert_loc (loc
, pred_type
, pred
);
6992 expected
= fold_convert_loc (loc
, expected_type
, expected
);
6993 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
6995 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
6996 build_int_cst (ret_type
, 0));
6999 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7000 NULL_TREE if no simplification is possible. */
7003 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
7005 tree inner
, fndecl
, inner_arg0
;
7006 enum tree_code code
;
7008 /* Distribute the expected value over short-circuiting operators.
7009 See through the cast from truthvalue_type_node to long. */
7011 while (TREE_CODE (inner_arg0
) == NOP_EXPR
7012 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7013 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7014 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7016 /* If this is a builtin_expect within a builtin_expect keep the
7017 inner one. See through a comparison against a constant. It
7018 might have been added to create a thruthvalue. */
7021 if (COMPARISON_CLASS_P (inner
)
7022 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7023 inner
= TREE_OPERAND (inner
, 0);
7025 if (TREE_CODE (inner
) == CALL_EXPR
7026 && (fndecl
= get_callee_fndecl (inner
))
7027 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7028 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7032 code
= TREE_CODE (inner
);
7033 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7035 tree op0
= TREE_OPERAND (inner
, 0);
7036 tree op1
= TREE_OPERAND (inner
, 1);
7038 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
7039 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
7040 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7042 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7045 /* If the argument isn't invariant then there's nothing else we can do. */
7046 if (!TREE_CONSTANT (inner_arg0
))
7049 /* If we expect that a comparison against the argument will fold to
7050 a constant return the constant. In practice, this means a true
7051 constant or the address of a non-weak symbol. */
7054 if (TREE_CODE (inner
) == ADDR_EXPR
)
7058 inner
= TREE_OPERAND (inner
, 0);
7060 while (TREE_CODE (inner
) == COMPONENT_REF
7061 || TREE_CODE (inner
) == ARRAY_REF
);
7062 if ((TREE_CODE (inner
) == VAR_DECL
7063 || TREE_CODE (inner
) == FUNCTION_DECL
)
7064 && DECL_WEAK (inner
))
7068 /* Otherwise, ARG0 already has the proper type for the return value. */
7072 /* Fold a call to __builtin_classify_type with argument ARG. */
7075 fold_builtin_classify_type (tree arg
)
7078 return build_int_cst (integer_type_node
, no_type_class
);
7080 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7083 /* Fold a call to __builtin_strlen with argument ARG. */
7086 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7088 if (!validate_arg (arg
, POINTER_TYPE
))
7092 tree len
= c_strlen (arg
, 0);
7095 return fold_convert_loc (loc
, type
, len
);
7101 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7104 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7106 REAL_VALUE_TYPE real
;
7108 /* __builtin_inff is intended to be usable to define INFINITY on all
7109 targets. If an infinity is not available, INFINITY expands "to a
7110 positive constant of type float that overflows at translation
7111 time", footnote "In this case, using INFINITY will violate the
7112 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7113 Thus we pedwarn to ensure this constraint violation is
7115 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7116 pedwarn (loc
, 0, "target format does not support infinity");
7119 return build_real (type
, real
);
7122 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7125 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7127 REAL_VALUE_TYPE real
;
7130 if (!validate_arg (arg
, POINTER_TYPE
))
7132 str
= c_getstr (arg
);
7136 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7139 return build_real (type
, real
);
7142 /* Return true if the floating point expression T has an integer value.
7143 We also allow +Inf, -Inf and NaN to be considered integer values. */
7146 integer_valued_real_p (tree t
)
7148 switch (TREE_CODE (t
))
7155 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7160 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7167 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7168 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7171 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7172 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7175 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7179 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7180 if (TREE_CODE (type
) == INTEGER_TYPE
)
7182 if (TREE_CODE (type
) == REAL_TYPE
)
7183 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7188 switch (builtin_mathfn_code (t
))
7190 CASE_FLT_FN (BUILT_IN_CEIL
):
7191 CASE_FLT_FN (BUILT_IN_FLOOR
):
7192 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7193 CASE_FLT_FN (BUILT_IN_RINT
):
7194 CASE_FLT_FN (BUILT_IN_ROUND
):
7195 CASE_FLT_FN (BUILT_IN_TRUNC
):
7198 CASE_FLT_FN (BUILT_IN_FMIN
):
7199 CASE_FLT_FN (BUILT_IN_FMAX
):
7200 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7201 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7214 /* FNDECL is assumed to be a builtin where truncation can be propagated
7215 across (for instance floor((double)f) == (double)floorf (f).
7216 Do the transformation for a call with argument ARG. */
7219 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7221 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7223 if (!validate_arg (arg
, REAL_TYPE
))
7226 /* Integer rounding functions are idempotent. */
7227 if (fcode
== builtin_mathfn_code (arg
))
7230 /* If argument is already integer valued, and we don't need to worry
7231 about setting errno, there's no need to perform rounding. */
7232 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7237 tree arg0
= strip_float_extensions (arg
);
7238 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7239 tree newtype
= TREE_TYPE (arg0
);
7242 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7243 && (decl
= mathfn_built_in (newtype
, fcode
)))
7244 return fold_convert_loc (loc
, ftype
,
7245 build_call_expr_loc (loc
, decl
, 1,
7246 fold_convert_loc (loc
,
7253 /* FNDECL is assumed to be builtin which can narrow the FP type of
7254 the argument, for instance lround((double)f) -> lroundf (f).
7255 Do the transformation for a call with argument ARG. */
7258 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7260 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7262 if (!validate_arg (arg
, REAL_TYPE
))
7265 /* If argument is already integer valued, and we don't need to worry
7266 about setting errno, there's no need to perform rounding. */
7267 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7268 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7269 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7273 tree ftype
= TREE_TYPE (arg
);
7274 tree arg0
= strip_float_extensions (arg
);
7275 tree newtype
= TREE_TYPE (arg0
);
7278 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7279 && (decl
= mathfn_built_in (newtype
, fcode
)))
7280 return build_call_expr_loc (loc
, decl
, 1,
7281 fold_convert_loc (loc
, newtype
, arg0
));
7284 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7285 sizeof (int) == sizeof (long). */
7286 if (TYPE_PRECISION (integer_type_node
)
7287 == TYPE_PRECISION (long_integer_type_node
))
7289 tree newfn
= NULL_TREE
;
7292 CASE_FLT_FN (BUILT_IN_ICEIL
):
7293 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7296 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7297 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7300 CASE_FLT_FN (BUILT_IN_IROUND
):
7301 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7304 CASE_FLT_FN (BUILT_IN_IRINT
):
7305 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7314 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7315 return fold_convert_loc (loc
,
7316 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7320 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7321 sizeof (long long) == sizeof (long). */
7322 if (TYPE_PRECISION (long_long_integer_type_node
)
7323 == TYPE_PRECISION (long_integer_type_node
))
7325 tree newfn
= NULL_TREE
;
7328 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7329 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7332 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7333 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7336 CASE_FLT_FN (BUILT_IN_LLROUND
):
7337 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7340 CASE_FLT_FN (BUILT_IN_LLRINT
):
7341 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7350 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7351 return fold_convert_loc (loc
,
7352 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7359 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7360 return type. Return NULL_TREE if no simplification can be made. */
7363 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7367 if (!validate_arg (arg
, COMPLEX_TYPE
)
7368 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7371 /* Calculate the result when the argument is a constant. */
7372 if (TREE_CODE (arg
) == COMPLEX_CST
7373 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7377 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7379 tree real
= TREE_OPERAND (arg
, 0);
7380 tree imag
= TREE_OPERAND (arg
, 1);
7382 /* If either part is zero, cabs is fabs of the other. */
7383 if (real_zerop (real
))
7384 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7385 if (real_zerop (imag
))
7386 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7388 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7389 if (flag_unsafe_math_optimizations
7390 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7392 const REAL_VALUE_TYPE sqrt2_trunc
7393 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7395 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7396 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7397 build_real (type
, sqrt2_trunc
));
7401 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7402 if (TREE_CODE (arg
) == NEGATE_EXPR
7403 || TREE_CODE (arg
) == CONJ_EXPR
)
7404 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7406 /* Don't do this when optimizing for size. */
7407 if (flag_unsafe_math_optimizations
7408 && optimize
&& optimize_function_for_speed_p (cfun
))
7410 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7412 if (sqrtfn
!= NULL_TREE
)
7414 tree rpart
, ipart
, result
;
7416 arg
= builtin_save_expr (arg
);
7418 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7419 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7421 rpart
= builtin_save_expr (rpart
);
7422 ipart
= builtin_save_expr (ipart
);
7424 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7425 fold_build2_loc (loc
, MULT_EXPR
, type
,
7427 fold_build2_loc (loc
, MULT_EXPR
, type
,
7430 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7437 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7438 complex tree type of the result. If NEG is true, the imaginary
7439 zero is negative. */
7442 build_complex_cproj (tree type
, bool neg
)
7444 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7448 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7449 build_real (TREE_TYPE (type
), rzero
));
7452 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7453 return type. Return NULL_TREE if no simplification can be made. */
7456 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7458 if (!validate_arg (arg
, COMPLEX_TYPE
)
7459 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7462 /* If there are no infinities, return arg. */
7463 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7464 return non_lvalue_loc (loc
, arg
);
7466 /* Calculate the result when the argument is a constant. */
7467 if (TREE_CODE (arg
) == COMPLEX_CST
)
7469 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7470 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7472 if (real_isinf (real
) || real_isinf (imag
))
7473 return build_complex_cproj (type
, imag
->sign
);
7477 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7479 tree real
= TREE_OPERAND (arg
, 0);
7480 tree imag
= TREE_OPERAND (arg
, 1);
7485 /* If the real part is inf and the imag part is known to be
7486 nonnegative, return (inf + 0i). Remember side-effects are
7487 possible in the imag part. */
7488 if (TREE_CODE (real
) == REAL_CST
7489 && real_isinf (TREE_REAL_CST_PTR (real
))
7490 && tree_expr_nonnegative_p (imag
))
7491 return omit_one_operand_loc (loc
, type
,
7492 build_complex_cproj (type
, false),
7495 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7496 Remember side-effects are possible in the real part. */
7497 if (TREE_CODE (imag
) == REAL_CST
7498 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7500 omit_one_operand_loc (loc
, type
,
7501 build_complex_cproj (type
, TREE_REAL_CST_PTR
7502 (imag
)->sign
), arg
);
7508 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
7512 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7515 enum built_in_function fcode
;
7518 if (!validate_arg (arg
, REAL_TYPE
))
7521 /* Calculate the result when the argument is a constant. */
7522 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7525 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7526 fcode
= builtin_mathfn_code (arg
);
7527 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7529 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7530 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7531 CALL_EXPR_ARG (arg
, 0),
7532 build_real (type
, dconsthalf
));
7533 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7536 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7537 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7539 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7543 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7545 /* The inner root was either sqrt or cbrt. */
7546 /* This was a conditional expression but it triggered a bug
7548 REAL_VALUE_TYPE dconstroot
;
7549 if (BUILTIN_SQRT_P (fcode
))
7550 dconstroot
= dconsthalf
;
7552 dconstroot
= dconst_third ();
7554 /* Adjust for the outer root. */
7555 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7556 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7557 tree_root
= build_real (type
, dconstroot
);
7558 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7562 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7563 if (flag_unsafe_math_optimizations
7564 && (fcode
== BUILT_IN_POW
7565 || fcode
== BUILT_IN_POWF
7566 || fcode
== BUILT_IN_POWL
))
7568 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7569 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7570 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7572 if (!tree_expr_nonnegative_p (arg0
))
7573 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7574 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7575 build_real (type
, dconsthalf
));
7576 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7582 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7583 Return NULL_TREE if no simplification can be made. */
7586 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7588 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7591 if (!validate_arg (arg
, REAL_TYPE
))
7594 /* Calculate the result when the argument is a constant. */
7595 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7598 if (flag_unsafe_math_optimizations
)
7600 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7601 if (BUILTIN_EXPONENT_P (fcode
))
7603 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7604 const REAL_VALUE_TYPE third_trunc
=
7605 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7606 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7607 CALL_EXPR_ARG (arg
, 0),
7608 build_real (type
, third_trunc
));
7609 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7612 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7613 if (BUILTIN_SQRT_P (fcode
))
7615 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7619 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7621 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7623 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7624 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7625 tree_root
= build_real (type
, dconstroot
);
7626 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7630 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7631 if (BUILTIN_CBRT_P (fcode
))
7633 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7634 if (tree_expr_nonnegative_p (arg0
))
7636 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7641 REAL_VALUE_TYPE dconstroot
;
7643 real_arithmetic (&dconstroot
, MULT_EXPR
,
7644 dconst_third_ptr (), dconst_third_ptr ());
7645 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7646 tree_root
= build_real (type
, dconstroot
);
7647 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7652 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7653 if (fcode
== BUILT_IN_POW
7654 || fcode
== BUILT_IN_POWF
7655 || fcode
== BUILT_IN_POWL
)
7657 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7658 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7659 if (tree_expr_nonnegative_p (arg00
))
7661 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7662 const REAL_VALUE_TYPE dconstroot
7663 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7664 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7665 build_real (type
, dconstroot
));
7666 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7673 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7674 TYPE is the type of the return value. Return NULL_TREE if no
7675 simplification can be made. */
7678 fold_builtin_cos (location_t loc
,
7679 tree arg
, tree type
, tree fndecl
)
7683 if (!validate_arg (arg
, REAL_TYPE
))
7686 /* Calculate the result when the argument is a constant. */
7687 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7690 /* Optimize cos(-x) into cos (x). */
7691 if ((narg
= fold_strip_sign_ops (arg
)))
7692 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7697 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7698 Return NULL_TREE if no simplification can be made. */
7701 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7703 if (validate_arg (arg
, REAL_TYPE
))
7707 /* Calculate the result when the argument is a constant. */
7708 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7711 /* Optimize cosh(-x) into cosh (x). */
7712 if ((narg
= fold_strip_sign_ops (arg
)))
7713 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7719 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7720 argument ARG. TYPE is the type of the return value. Return
7721 NULL_TREE if no simplification can be made. */
7724 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7727 if (validate_arg (arg
, COMPLEX_TYPE
)
7728 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7732 /* Calculate the result when the argument is a constant. */
7733 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7736 /* Optimize fn(-x) into fn(x). */
7737 if ((tmp
= fold_strip_sign_ops (arg
)))
7738 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7744 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7745 Return NULL_TREE if no simplification can be made. */
7748 fold_builtin_tan (tree arg
, tree type
)
7750 enum built_in_function fcode
;
7753 if (!validate_arg (arg
, REAL_TYPE
))
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7760 /* Optimize tan(atan(x)) = x. */
7761 fcode
= builtin_mathfn_code (arg
);
7762 if (flag_unsafe_math_optimizations
7763 && (fcode
== BUILT_IN_ATAN
7764 || fcode
== BUILT_IN_ATANF
7765 || fcode
== BUILT_IN_ATANL
))
7766 return CALL_EXPR_ARG (arg
, 0);
7771 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7772 NULL_TREE if no simplification can be made. */
7775 fold_builtin_sincos (location_t loc
,
7776 tree arg0
, tree arg1
, tree arg2
)
7781 if (!validate_arg (arg0
, REAL_TYPE
)
7782 || !validate_arg (arg1
, POINTER_TYPE
)
7783 || !validate_arg (arg2
, POINTER_TYPE
))
7786 type
= TREE_TYPE (arg0
);
7788 /* Calculate the result when the argument is a constant. */
7789 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7792 /* Canonicalize sincos to cexpi. */
7793 if (!targetm
.libc_has_function (function_c99_math_complex
))
7795 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7799 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7800 call
= builtin_save_expr (call
);
7802 return build2 (COMPOUND_EXPR
, void_type_node
,
7803 build2 (MODIFY_EXPR
, void_type_node
,
7804 build_fold_indirect_ref_loc (loc
, arg1
),
7805 build1 (IMAGPART_EXPR
, type
, call
)),
7806 build2 (MODIFY_EXPR
, void_type_node
,
7807 build_fold_indirect_ref_loc (loc
, arg2
),
7808 build1 (REALPART_EXPR
, type
, call
)));
7811 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7812 NULL_TREE if no simplification can be made. */
7815 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7818 tree realp
, imagp
, ifn
;
7821 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7822 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7825 /* Calculate the result when the argument is a constant. */
7826 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7829 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7831 /* In case we can figure out the real part of arg0 and it is constant zero
7833 if (!targetm
.libc_has_function (function_c99_math_complex
))
7835 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7839 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7840 && real_zerop (realp
))
7842 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7843 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7846 /* In case we can easily decompose real and imaginary parts split cexp
7847 to exp (r) * cexpi (i). */
7848 if (flag_unsafe_math_optimizations
7851 tree rfn
, rcall
, icall
;
7853 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7857 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7861 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7862 icall
= builtin_save_expr (icall
);
7863 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7864 rcall
= builtin_save_expr (rcall
);
7865 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7866 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7868 fold_build1_loc (loc
, REALPART_EXPR
,
7870 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7872 fold_build1_loc (loc
, IMAGPART_EXPR
,
7879 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7880 Return NULL_TREE if no simplification can be made. */
7883 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7885 if (!validate_arg (arg
, REAL_TYPE
))
7888 /* Optimize trunc of constant value. */
7889 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7891 REAL_VALUE_TYPE r
, x
;
7892 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7894 x
= TREE_REAL_CST (arg
);
7895 real_trunc (&r
, TYPE_MODE (type
), &x
);
7896 return build_real (type
, r
);
7899 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7902 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7903 Return NULL_TREE if no simplification can be made. */
7906 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7908 if (!validate_arg (arg
, REAL_TYPE
))
7911 /* Optimize floor of constant value. */
7912 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7916 x
= TREE_REAL_CST (arg
);
7917 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7919 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7922 real_floor (&r
, TYPE_MODE (type
), &x
);
7923 return build_real (type
, r
);
7927 /* Fold floor (x) where x is nonnegative to trunc (x). */
7928 if (tree_expr_nonnegative_p (arg
))
7930 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7932 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7935 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7938 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7942 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7944 if (!validate_arg (arg
, REAL_TYPE
))
7947 /* Optimize ceil of constant value. */
7948 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7952 x
= TREE_REAL_CST (arg
);
7953 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7955 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7958 real_ceil (&r
, TYPE_MODE (type
), &x
);
7959 return build_real (type
, r
);
7963 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7966 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7967 Return NULL_TREE if no simplification can be made. */
7970 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7972 if (!validate_arg (arg
, REAL_TYPE
))
7975 /* Optimize round of constant value. */
7976 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7980 x
= TREE_REAL_CST (arg
);
7981 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7983 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7986 real_round (&r
, TYPE_MODE (type
), &x
);
7987 return build_real (type
, r
);
7991 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7994 /* Fold function call to builtin lround, lroundf or lroundl (or the
7995 corresponding long long versions) and other rounding functions. ARG
7996 is the argument to the call. Return NULL_TREE if no simplification
8000 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
8002 if (!validate_arg (arg
, REAL_TYPE
))
8005 /* Optimize lround of constant value. */
8006 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
8008 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
8010 if (real_isfinite (&x
))
8012 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
8013 tree ftype
= TREE_TYPE (arg
);
8017 switch (DECL_FUNCTION_CODE (fndecl
))
8019 CASE_FLT_FN (BUILT_IN_IFLOOR
):
8020 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8021 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8022 real_floor (&r
, TYPE_MODE (ftype
), &x
);
8025 CASE_FLT_FN (BUILT_IN_ICEIL
):
8026 CASE_FLT_FN (BUILT_IN_LCEIL
):
8027 CASE_FLT_FN (BUILT_IN_LLCEIL
):
8028 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
8031 CASE_FLT_FN (BUILT_IN_IROUND
):
8032 CASE_FLT_FN (BUILT_IN_LROUND
):
8033 CASE_FLT_FN (BUILT_IN_LLROUND
):
8034 real_round (&r
, TYPE_MODE (ftype
), &x
);
8041 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
8042 if (double_int_fits_to_tree_p (itype
, val
))
8043 return double_int_to_tree (itype
, val
);
8047 switch (DECL_FUNCTION_CODE (fndecl
))
8049 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8050 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8051 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8052 if (tree_expr_nonnegative_p (arg
))
8053 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
8054 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
8059 return fold_fixed_mathfn (loc
, fndecl
, arg
);
8062 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8063 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8064 the argument to the call. Return NULL_TREE if no simplification can
8068 fold_builtin_bitop (tree fndecl
, tree arg
)
8070 if (!validate_arg (arg
, INTEGER_TYPE
))
8073 /* Optimize for constant argument. */
8074 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8076 HOST_WIDE_INT hi
, width
, result
;
8077 unsigned HOST_WIDE_INT lo
;
8080 type
= TREE_TYPE (arg
);
8081 width
= TYPE_PRECISION (type
);
8082 lo
= TREE_INT_CST_LOW (arg
);
8084 /* Clear all the bits that are beyond the type's precision. */
8085 if (width
> HOST_BITS_PER_WIDE_INT
)
8087 hi
= TREE_INT_CST_HIGH (arg
);
8088 if (width
< HOST_BITS_PER_DOUBLE_INT
)
8089 hi
&= ~(HOST_WIDE_INT_M1U
<< (width
- HOST_BITS_PER_WIDE_INT
));
8094 if (width
< HOST_BITS_PER_WIDE_INT
)
8095 lo
&= ~(HOST_WIDE_INT_M1U
<< width
);
8098 switch (DECL_FUNCTION_CODE (fndecl
))
8100 CASE_INT_FN (BUILT_IN_FFS
):
8102 result
= ffs_hwi (lo
);
8104 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
8109 CASE_INT_FN (BUILT_IN_CLZ
):
8111 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8113 result
= width
- floor_log2 (lo
) - 1;
8114 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8118 CASE_INT_FN (BUILT_IN_CTZ
):
8120 result
= ctz_hwi (lo
);
8122 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
8123 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8127 CASE_INT_FN (BUILT_IN_CLRSB
):
8128 if (width
> 2 * HOST_BITS_PER_WIDE_INT
)
8130 if (width
> HOST_BITS_PER_WIDE_INT
8131 && (hi
& ((unsigned HOST_WIDE_INT
) 1
8132 << (width
- HOST_BITS_PER_WIDE_INT
- 1))) != 0)
8134 hi
= ~hi
& ~(HOST_WIDE_INT_M1U
8135 << (width
- HOST_BITS_PER_WIDE_INT
- 1));
8138 else if (width
<= HOST_BITS_PER_WIDE_INT
8139 && (lo
& ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
8140 lo
= ~lo
& ~(HOST_WIDE_INT_M1U
<< (width
- 1));
8142 result
= width
- floor_log2 (hi
) - 2 - HOST_BITS_PER_WIDE_INT
;
8144 result
= width
- floor_log2 (lo
) - 2;
8149 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8152 result
++, lo
&= lo
- 1;
8154 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8157 CASE_INT_FN (BUILT_IN_PARITY
):
8160 result
++, lo
&= lo
- 1;
8162 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8170 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8176 /* Fold function call to builtin_bswap and the short, long and long long
8177 variants. Return NULL_TREE if no simplification can be made. */
8179 fold_builtin_bswap (tree fndecl
, tree arg
)
8181 if (! validate_arg (arg
, INTEGER_TYPE
))
8184 /* Optimize constant value. */
8185 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8187 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8188 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8189 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8191 width
= TYPE_PRECISION (type
);
8192 lo
= TREE_INT_CST_LOW (arg
);
8193 hi
= TREE_INT_CST_HIGH (arg
);
8195 switch (DECL_FUNCTION_CODE (fndecl
))
8197 case BUILT_IN_BSWAP16
:
8198 case BUILT_IN_BSWAP32
:
8199 case BUILT_IN_BSWAP64
:
8203 for (s
= 0; s
< width
; s
+= 8)
8205 int d
= width
- s
- 8;
8206 unsigned HOST_WIDE_INT byte
;
8208 if (s
< HOST_BITS_PER_WIDE_INT
)
8209 byte
= (lo
>> s
) & 0xff;
8211 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8213 if (d
< HOST_BITS_PER_WIDE_INT
)
8216 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8226 if (width
< HOST_BITS_PER_WIDE_INT
)
8227 return build_int_cst (type
, r_lo
);
8229 return build_int_cst_wide (type
, r_lo
, r_hi
);
8235 /* A subroutine of fold_builtin to fold the various logarithmic
8236 functions. Return NULL_TREE if no simplification can me made.
8237 FUNC is the corresponding MPFR logarithm function. */
8240 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8241 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8243 if (validate_arg (arg
, REAL_TYPE
))
8245 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8247 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8249 /* Calculate the result when the argument is a constant. */
8250 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8253 /* Special case, optimize logN(expN(x)) = x. */
8254 if (flag_unsafe_math_optimizations
8255 && ((func
== mpfr_log
8256 && (fcode
== BUILT_IN_EXP
8257 || fcode
== BUILT_IN_EXPF
8258 || fcode
== BUILT_IN_EXPL
))
8259 || (func
== mpfr_log2
8260 && (fcode
== BUILT_IN_EXP2
8261 || fcode
== BUILT_IN_EXP2F
8262 || fcode
== BUILT_IN_EXP2L
))
8263 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8264 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8266 /* Optimize logN(func()) for various exponential functions. We
8267 want to determine the value "x" and the power "exponent" in
8268 order to transform logN(x**exponent) into exponent*logN(x). */
8269 if (flag_unsafe_math_optimizations
)
8271 tree exponent
= 0, x
= 0;
8275 CASE_FLT_FN (BUILT_IN_EXP
):
8276 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8277 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8279 exponent
= CALL_EXPR_ARG (arg
, 0);
8281 CASE_FLT_FN (BUILT_IN_EXP2
):
8282 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8283 x
= build_real (type
, dconst2
);
8284 exponent
= CALL_EXPR_ARG (arg
, 0);
8286 CASE_FLT_FN (BUILT_IN_EXP10
):
8287 CASE_FLT_FN (BUILT_IN_POW10
):
8288 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8290 REAL_VALUE_TYPE dconst10
;
8291 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8292 x
= build_real (type
, dconst10
);
8294 exponent
= CALL_EXPR_ARG (arg
, 0);
8296 CASE_FLT_FN (BUILT_IN_SQRT
):
8297 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8298 x
= CALL_EXPR_ARG (arg
, 0);
8299 exponent
= build_real (type
, dconsthalf
);
8301 CASE_FLT_FN (BUILT_IN_CBRT
):
8302 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8303 x
= CALL_EXPR_ARG (arg
, 0);
8304 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8307 CASE_FLT_FN (BUILT_IN_POW
):
8308 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8309 x
= CALL_EXPR_ARG (arg
, 0);
8310 exponent
= CALL_EXPR_ARG (arg
, 1);
8316 /* Now perform the optimization. */
8319 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8320 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8328 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8329 NULL_TREE if no simplification can be made. */
8332 fold_builtin_hypot (location_t loc
, tree fndecl
,
8333 tree arg0
, tree arg1
, tree type
)
8335 tree res
, narg0
, narg1
;
8337 if (!validate_arg (arg0
, REAL_TYPE
)
8338 || !validate_arg (arg1
, REAL_TYPE
))
8341 /* Calculate the result when the argument is a constant. */
8342 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8345 /* If either argument to hypot has a negate or abs, strip that off.
8346 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8347 narg0
= fold_strip_sign_ops (arg0
);
8348 narg1
= fold_strip_sign_ops (arg1
);
8351 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8352 narg1
? narg1
: arg1
);
8355 /* If either argument is zero, hypot is fabs of the other. */
8356 if (real_zerop (arg0
))
8357 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8358 else if (real_zerop (arg1
))
8359 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8361 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8362 if (flag_unsafe_math_optimizations
8363 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8365 const REAL_VALUE_TYPE sqrt2_trunc
8366 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8367 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8368 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8369 build_real (type
, sqrt2_trunc
));
8376 /* Fold a builtin function call to pow, powf, or powl. Return
8377 NULL_TREE if no simplification can be made. */
8379 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8383 if (!validate_arg (arg0
, REAL_TYPE
)
8384 || !validate_arg (arg1
, REAL_TYPE
))
8387 /* Calculate the result when the argument is a constant. */
8388 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8391 /* Optimize pow(1.0,y) = 1.0. */
8392 if (real_onep (arg0
))
8393 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8395 if (TREE_CODE (arg1
) == REAL_CST
8396 && !TREE_OVERFLOW (arg1
))
8398 REAL_VALUE_TYPE cint
;
8402 c
= TREE_REAL_CST (arg1
);
8404 /* Optimize pow(x,0.0) = 1.0. */
8405 if (REAL_VALUES_EQUAL (c
, dconst0
))
8406 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8409 /* Optimize pow(x,1.0) = x. */
8410 if (REAL_VALUES_EQUAL (c
, dconst1
))
8413 /* Optimize pow(x,-1.0) = 1.0/x. */
8414 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8415 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8416 build_real (type
, dconst1
), arg0
);
8418 /* Optimize pow(x,0.5) = sqrt(x). */
8419 if (flag_unsafe_math_optimizations
8420 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8422 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8424 if (sqrtfn
!= NULL_TREE
)
8425 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8428 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8429 if (flag_unsafe_math_optimizations
)
8431 const REAL_VALUE_TYPE dconstroot
8432 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8434 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8436 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8437 if (cbrtfn
!= NULL_TREE
)
8438 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8442 /* Check for an integer exponent. */
8443 n
= real_to_integer (&c
);
8444 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8445 if (real_identical (&c
, &cint
))
8447 /* Attempt to evaluate pow at compile-time, unless this should
8448 raise an exception. */
8449 if (TREE_CODE (arg0
) == REAL_CST
8450 && !TREE_OVERFLOW (arg0
)
8452 || (!flag_trapping_math
&& !flag_errno_math
)
8453 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8458 x
= TREE_REAL_CST (arg0
);
8459 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8460 if (flag_unsafe_math_optimizations
|| !inexact
)
8461 return build_real (type
, x
);
8464 /* Strip sign ops from even integer powers. */
8465 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8467 tree narg0
= fold_strip_sign_ops (arg0
);
8469 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8474 if (flag_unsafe_math_optimizations
)
8476 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8478 /* Optimize pow(expN(x),y) = expN(x*y). */
8479 if (BUILTIN_EXPONENT_P (fcode
))
8481 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8482 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8483 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8484 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8487 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8488 if (BUILTIN_SQRT_P (fcode
))
8490 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8491 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8492 build_real (type
, dconsthalf
));
8493 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8496 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8497 if (BUILTIN_CBRT_P (fcode
))
8499 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8500 if (tree_expr_nonnegative_p (arg
))
8502 const REAL_VALUE_TYPE dconstroot
8503 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8504 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8505 build_real (type
, dconstroot
));
8506 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8510 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8511 if (fcode
== BUILT_IN_POW
8512 || fcode
== BUILT_IN_POWF
8513 || fcode
== BUILT_IN_POWL
)
8515 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8516 if (tree_expr_nonnegative_p (arg00
))
8518 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8519 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8520 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8528 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8529 Return NULL_TREE if no simplification can be made. */
8531 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8532 tree arg0
, tree arg1
, tree type
)
8534 if (!validate_arg (arg0
, REAL_TYPE
)
8535 || !validate_arg (arg1
, INTEGER_TYPE
))
8538 /* Optimize pow(1.0,y) = 1.0. */
8539 if (real_onep (arg0
))
8540 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8542 if (tree_fits_shwi_p (arg1
))
8544 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8546 /* Evaluate powi at compile-time. */
8547 if (TREE_CODE (arg0
) == REAL_CST
8548 && !TREE_OVERFLOW (arg0
))
8551 x
= TREE_REAL_CST (arg0
);
8552 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8553 return build_real (type
, x
);
8556 /* Optimize pow(x,0) = 1.0. */
8558 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8561 /* Optimize pow(x,1) = x. */
8565 /* Optimize pow(x,-1) = 1.0/x. */
8567 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8568 build_real (type
, dconst1
), arg0
);
8574 /* A subroutine of fold_builtin to fold the various exponent
8575 functions. Return NULL_TREE if no simplification can be made.
8576 FUNC is the corresponding MPFR exponent function. */
8579 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8580 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8582 if (validate_arg (arg
, REAL_TYPE
))
8584 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8587 /* Calculate the result when the argument is a constant. */
8588 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8591 /* Optimize expN(logN(x)) = x. */
8592 if (flag_unsafe_math_optimizations
)
8594 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8596 if ((func
== mpfr_exp
8597 && (fcode
== BUILT_IN_LOG
8598 || fcode
== BUILT_IN_LOGF
8599 || fcode
== BUILT_IN_LOGL
))
8600 || (func
== mpfr_exp2
8601 && (fcode
== BUILT_IN_LOG2
8602 || fcode
== BUILT_IN_LOG2F
8603 || fcode
== BUILT_IN_LOG2L
))
8604 || (func
== mpfr_exp10
8605 && (fcode
== BUILT_IN_LOG10
8606 || fcode
== BUILT_IN_LOG10F
8607 || fcode
== BUILT_IN_LOG10L
)))
8608 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8615 /* Return true if VAR is a VAR_DECL or a component thereof. */
8618 var_decl_component_p (tree var
)
8621 while (handled_component_p (inner
))
8622 inner
= TREE_OPERAND (inner
, 0);
8623 return SSA_VAR_P (inner
);
8626 /* Fold function call to builtin memset. Return
8627 NULL_TREE if no simplification can be made. */
8630 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8631 tree type
, bool ignore
)
8633 tree var
, ret
, etype
;
8634 unsigned HOST_WIDE_INT length
, cval
;
8636 if (! validate_arg (dest
, POINTER_TYPE
)
8637 || ! validate_arg (c
, INTEGER_TYPE
)
8638 || ! validate_arg (len
, INTEGER_TYPE
))
8641 if (! tree_fits_uhwi_p (len
))
8644 /* If the LEN parameter is zero, return DEST. */
8645 if (integer_zerop (len
))
8646 return omit_one_operand_loc (loc
, type
, dest
, c
);
8648 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8653 if (TREE_CODE (var
) != ADDR_EXPR
)
8656 var
= TREE_OPERAND (var
, 0);
8657 if (TREE_THIS_VOLATILE (var
))
8660 etype
= TREE_TYPE (var
);
8661 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8662 etype
= TREE_TYPE (etype
);
8664 if (!INTEGRAL_TYPE_P (etype
)
8665 && !POINTER_TYPE_P (etype
))
8668 if (! var_decl_component_p (var
))
8671 length
= tree_to_uhwi (len
);
8672 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8673 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8676 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8679 if (integer_zerop (c
))
8683 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8686 cval
= TREE_INT_CST_LOW (c
);
8690 cval
|= (cval
<< 31) << 1;
8693 ret
= build_int_cst_type (etype
, cval
);
8694 var
= build_fold_indirect_ref_loc (loc
,
8695 fold_convert_loc (loc
,
8696 build_pointer_type (etype
),
8698 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8702 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8705 /* Fold function call to builtin memset. Return
8706 NULL_TREE if no simplification can be made. */
8709 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8711 if (! validate_arg (dest
, POINTER_TYPE
)
8712 || ! validate_arg (size
, INTEGER_TYPE
))
8718 /* New argument list transforming bzero(ptr x, int y) to
8719 memset(ptr x, int 0, size_t y). This is done this way
8720 so that if it isn't expanded inline, we fallback to
8721 calling bzero instead of memset. */
8723 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8724 fold_convert_loc (loc
, size_type_node
, size
),
8725 void_type_node
, ignore
);
8728 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8729 NULL_TREE if no simplification can be made.
8730 If ENDP is 0, return DEST (like memcpy).
8731 If ENDP is 1, return DEST+LEN (like mempcpy).
8732 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8733 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8737 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8738 tree len
, tree type
, bool ignore
, int endp
)
8740 tree destvar
, srcvar
, expr
;
8742 if (! validate_arg (dest
, POINTER_TYPE
)
8743 || ! validate_arg (src
, POINTER_TYPE
)
8744 || ! validate_arg (len
, INTEGER_TYPE
))
8747 /* If the LEN parameter is zero, return DEST. */
8748 if (integer_zerop (len
))
8749 return omit_one_operand_loc (loc
, type
, dest
, src
);
8751 /* If SRC and DEST are the same (and not volatile), return
8752 DEST{,+LEN,+LEN-1}. */
8753 if (operand_equal_p (src
, dest
, 0))
8757 tree srctype
, desttype
;
8758 unsigned int src_align
, dest_align
;
8763 src_align
= get_pointer_alignment (src
);
8764 dest_align
= get_pointer_alignment (dest
);
8766 /* Both DEST and SRC must be pointer types.
8767 ??? This is what old code did. Is the testing for pointer types
8770 If either SRC is readonly or length is 1, we can use memcpy. */
8771 if (!dest_align
|| !src_align
)
8773 if (readonly_data_expr (src
)
8774 || (tree_fits_uhwi_p (len
)
8775 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8776 >= tree_to_uhwi (len
))))
8778 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8781 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8784 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8785 if (TREE_CODE (src
) == ADDR_EXPR
8786 && TREE_CODE (dest
) == ADDR_EXPR
)
8788 tree src_base
, dest_base
, fn
;
8789 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8790 HOST_WIDE_INT size
= -1;
8791 HOST_WIDE_INT maxsize
= -1;
8793 srcvar
= TREE_OPERAND (src
, 0);
8794 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8796 destvar
= TREE_OPERAND (dest
, 0);
8797 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8799 if (tree_fits_uhwi_p (len
))
8800 maxsize
= tree_to_uhwi (len
);
8803 src_offset
/= BITS_PER_UNIT
;
8804 dest_offset
/= BITS_PER_UNIT
;
8805 if (SSA_VAR_P (src_base
)
8806 && SSA_VAR_P (dest_base
))
8808 if (operand_equal_p (src_base
, dest_base
, 0)
8809 && ranges_overlap_p (src_offset
, maxsize
,
8810 dest_offset
, maxsize
))
8813 else if (TREE_CODE (src_base
) == MEM_REF
8814 && TREE_CODE (dest_base
) == MEM_REF
)
8817 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8818 TREE_OPERAND (dest_base
, 0), 0))
8820 off
= mem_ref_offset (src_base
) +
8821 double_int::from_shwi (src_offset
);
8822 if (!off
.fits_shwi ())
8824 src_offset
= off
.low
;
8825 off
= mem_ref_offset (dest_base
) +
8826 double_int::from_shwi (dest_offset
);
8827 if (!off
.fits_shwi ())
8829 dest_offset
= off
.low
;
8830 if (ranges_overlap_p (src_offset
, maxsize
,
8831 dest_offset
, maxsize
))
8837 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8840 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8843 /* If the destination and source do not alias optimize into
8845 if ((is_gimple_min_invariant (dest
)
8846 || TREE_CODE (dest
) == SSA_NAME
)
8847 && (is_gimple_min_invariant (src
)
8848 || TREE_CODE (src
) == SSA_NAME
))
8851 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8852 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8853 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8856 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8859 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8866 if (!tree_fits_shwi_p (len
))
8869 This logic lose for arguments like (type *)malloc (sizeof (type)),
8870 since we strip the casts of up to VOID return value from malloc.
8871 Perhaps we ought to inherit type from non-VOID argument here? */
8874 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8875 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8877 /* In the following try to find a type that is most natural to be
8878 used for the memcpy source and destination and that allows
8879 the most optimization when memcpy is turned into a plain assignment
8880 using that type. In theory we could always use a char[len] type
8881 but that only gains us that the destination and source possibly
8882 no longer will have their address taken. */
8883 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8884 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8886 tree tem
= TREE_OPERAND (src
, 0);
8888 if (tem
!= TREE_OPERAND (src
, 0))
8889 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8891 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8893 tree tem
= TREE_OPERAND (dest
, 0);
8895 if (tem
!= TREE_OPERAND (dest
, 0))
8896 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8898 srctype
= TREE_TYPE (TREE_TYPE (src
));
8899 if (TREE_CODE (srctype
) == ARRAY_TYPE
8900 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8902 srctype
= TREE_TYPE (srctype
);
8904 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8906 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8907 if (TREE_CODE (desttype
) == ARRAY_TYPE
8908 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8910 desttype
= TREE_TYPE (desttype
);
8912 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8914 if (TREE_ADDRESSABLE (srctype
)
8915 || TREE_ADDRESSABLE (desttype
))
8918 /* Make sure we are not copying using a floating-point mode or
8919 a type whose size possibly does not match its precision. */
8920 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
8921 || TREE_CODE (desttype
) == BOOLEAN_TYPE
8922 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
8924 /* A more suitable int_mode_for_mode would return a vector
8925 integer mode for a vector float mode or a integer complex
8926 mode for a float complex mode if there isn't a regular
8927 integer mode covering the mode of desttype. */
8928 enum machine_mode mode
= int_mode_for_mode (TYPE_MODE (desttype
));
8929 if (mode
== BLKmode
)
8930 desttype
= NULL_TREE
;
8932 desttype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode
),
8935 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
8936 || TREE_CODE (srctype
) == BOOLEAN_TYPE
8937 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
8939 enum machine_mode mode
= int_mode_for_mode (TYPE_MODE (srctype
));
8940 if (mode
== BLKmode
)
8941 srctype
= NULL_TREE
;
8943 srctype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode
),
8953 src_align
= get_pointer_alignment (src
);
8954 dest_align
= get_pointer_alignment (dest
);
8955 if (dest_align
< TYPE_ALIGN (desttype
)
8956 || src_align
< TYPE_ALIGN (srctype
))
8960 dest
= builtin_save_expr (dest
);
8962 /* Build accesses at offset zero with a ref-all character type. */
8963 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8964 ptr_mode
, true), 0);
8967 STRIP_NOPS (destvar
);
8968 if (TREE_CODE (destvar
) == ADDR_EXPR
8969 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8971 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8973 destvar
= NULL_TREE
;
8976 STRIP_NOPS (srcvar
);
8977 if (TREE_CODE (srcvar
) == ADDR_EXPR
8978 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8982 || src_align
>= TYPE_ALIGN (desttype
))
8983 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8985 else if (!STRICT_ALIGNMENT
)
8987 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8989 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8997 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
9000 if (srcvar
== NULL_TREE
)
9003 if (src_align
>= TYPE_ALIGN (desttype
))
9004 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
9007 if (STRICT_ALIGNMENT
)
9009 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
9011 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
9014 else if (destvar
== NULL_TREE
)
9017 if (dest_align
>= TYPE_ALIGN (srctype
))
9018 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
9021 if (STRICT_ALIGNMENT
)
9023 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
9025 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
9029 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
9035 if (endp
== 0 || endp
== 3)
9036 return omit_one_operand_loc (loc
, type
, dest
, expr
);
9042 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
9045 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9046 dest
= fold_convert_loc (loc
, type
, dest
);
9048 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
9052 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9053 If LEN is not NULL, it represents the length of the string to be
9054 copied. Return NULL_TREE if no simplification can be made. */
9057 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
9061 if (!validate_arg (dest
, POINTER_TYPE
)
9062 || !validate_arg (src
, POINTER_TYPE
))
9065 /* If SRC and DEST are the same (and not volatile), return DEST. */
9066 if (operand_equal_p (src
, dest
, 0))
9067 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
9069 if (optimize_function_for_size_p (cfun
))
9072 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9078 len
= c_strlen (src
, 1);
9079 if (! len
|| TREE_SIDE_EFFECTS (len
))
9083 len
= fold_convert_loc (loc
, size_type_node
, len
);
9084 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
9085 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9086 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9089 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9090 Return NULL_TREE if no simplification can be made. */
9093 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
9095 tree fn
, len
, lenp1
, call
, type
;
9097 if (!validate_arg (dest
, POINTER_TYPE
)
9098 || !validate_arg (src
, POINTER_TYPE
))
9101 len
= c_strlen (src
, 1);
9103 || TREE_CODE (len
) != INTEGER_CST
)
9106 if (optimize_function_for_size_p (cfun
)
9107 /* If length is zero it's small enough. */
9108 && !integer_zerop (len
))
9111 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9115 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
9116 fold_convert_loc (loc
, size_type_node
, len
),
9117 build_int_cst (size_type_node
, 1));
9118 /* We use dest twice in building our expression. Save it from
9119 multiple expansions. */
9120 dest
= builtin_save_expr (dest
);
9121 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
9123 type
= TREE_TYPE (TREE_TYPE (fndecl
));
9124 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
9125 dest
= fold_convert_loc (loc
, type
, dest
);
9126 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
9130 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9131 If SLEN is not NULL, it represents the length of the source string.
9132 Return NULL_TREE if no simplification can be made. */
9135 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
9136 tree src
, tree len
, tree slen
)
9140 if (!validate_arg (dest
, POINTER_TYPE
)
9141 || !validate_arg (src
, POINTER_TYPE
)
9142 || !validate_arg (len
, INTEGER_TYPE
))
9145 /* If the LEN parameter is zero, return DEST. */
9146 if (integer_zerop (len
))
9147 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
9149 /* We can't compare slen with len as constants below if len is not a
9151 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
9155 slen
= c_strlen (src
, 1);
9157 /* Now, we must be passed a constant src ptr parameter. */
9158 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
9161 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
9163 /* We do not support simplification of this case, though we do
9164 support it when expanding trees into RTL. */
9165 /* FIXME: generate a call to __builtin_memset. */
9166 if (tree_int_cst_lt (slen
, len
))
9169 /* OK transform into builtin memcpy. */
9170 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9174 len
= fold_convert_loc (loc
, size_type_node
, len
);
9175 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9176 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9179 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9180 arguments to the call, and TYPE is its return type.
9181 Return NULL_TREE if no simplification can be made. */
9184 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9186 if (!validate_arg (arg1
, POINTER_TYPE
)
9187 || !validate_arg (arg2
, INTEGER_TYPE
)
9188 || !validate_arg (len
, INTEGER_TYPE
))
9194 if (TREE_CODE (arg2
) != INTEGER_CST
9195 || !tree_fits_uhwi_p (len
))
9198 p1
= c_getstr (arg1
);
9199 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9205 if (target_char_cast (arg2
, &c
))
9208 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
9211 return build_int_cst (TREE_TYPE (arg1
), 0);
9213 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9214 return fold_convert_loc (loc
, type
, tem
);
9220 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9221 Return NULL_TREE if no simplification can be made. */
9224 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9226 const char *p1
, *p2
;
9228 if (!validate_arg (arg1
, POINTER_TYPE
)
9229 || !validate_arg (arg2
, POINTER_TYPE
)
9230 || !validate_arg (len
, INTEGER_TYPE
))
9233 /* If the LEN parameter is zero, return zero. */
9234 if (integer_zerop (len
))
9235 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9238 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9239 if (operand_equal_p (arg1
, arg2
, 0))
9240 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9242 p1
= c_getstr (arg1
);
9243 p2
= c_getstr (arg2
);
9245 /* If all arguments are constant, and the value of len is not greater
9246 than the lengths of arg1 and arg2, evaluate at compile-time. */
9247 if (tree_fits_uhwi_p (len
) && p1
&& p2
9248 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9249 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9251 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
9254 return integer_one_node
;
9256 return integer_minus_one_node
;
9258 return integer_zero_node
;
9261 /* If len parameter is one, return an expression corresponding to
9262 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9263 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
9265 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9266 tree cst_uchar_ptr_node
9267 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9270 = fold_convert_loc (loc
, integer_type_node
,
9271 build1 (INDIRECT_REF
, cst_uchar_node
,
9272 fold_convert_loc (loc
,
9276 = fold_convert_loc (loc
, integer_type_node
,
9277 build1 (INDIRECT_REF
, cst_uchar_node
,
9278 fold_convert_loc (loc
,
9281 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9287 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9288 Return NULL_TREE if no simplification can be made. */
9291 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9293 const char *p1
, *p2
;
9295 if (!validate_arg (arg1
, POINTER_TYPE
)
9296 || !validate_arg (arg2
, POINTER_TYPE
))
9299 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9300 if (operand_equal_p (arg1
, arg2
, 0))
9301 return integer_zero_node
;
9303 p1
= c_getstr (arg1
);
9304 p2
= c_getstr (arg2
);
9308 const int i
= strcmp (p1
, p2
);
9310 return integer_minus_one_node
;
9312 return integer_one_node
;
9314 return integer_zero_node
;
9317 /* If the second arg is "", return *(const unsigned char*)arg1. */
9318 if (p2
&& *p2
== '\0')
9320 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9321 tree cst_uchar_ptr_node
9322 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9324 return fold_convert_loc (loc
, integer_type_node
,
9325 build1 (INDIRECT_REF
, cst_uchar_node
,
9326 fold_convert_loc (loc
,
9331 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9332 if (p1
&& *p1
== '\0')
9334 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9335 tree cst_uchar_ptr_node
9336 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9339 = fold_convert_loc (loc
, integer_type_node
,
9340 build1 (INDIRECT_REF
, cst_uchar_node
,
9341 fold_convert_loc (loc
,
9344 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9350 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9351 Return NULL_TREE if no simplification can be made. */
9354 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9356 const char *p1
, *p2
;
9358 if (!validate_arg (arg1
, POINTER_TYPE
)
9359 || !validate_arg (arg2
, POINTER_TYPE
)
9360 || !validate_arg (len
, INTEGER_TYPE
))
9363 /* If the LEN parameter is zero, return zero. */
9364 if (integer_zerop (len
))
9365 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9368 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9369 if (operand_equal_p (arg1
, arg2
, 0))
9370 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9372 p1
= c_getstr (arg1
);
9373 p2
= c_getstr (arg2
);
9375 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
9377 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
9379 return integer_one_node
;
9381 return integer_minus_one_node
;
9383 return integer_zero_node
;
9386 /* If the second arg is "", and the length is greater than zero,
9387 return *(const unsigned char*)arg1. */
9388 if (p2
&& *p2
== '\0'
9389 && TREE_CODE (len
) == INTEGER_CST
9390 && tree_int_cst_sgn (len
) == 1)
9392 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9393 tree cst_uchar_ptr_node
9394 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9396 return fold_convert_loc (loc
, integer_type_node
,
9397 build1 (INDIRECT_REF
, cst_uchar_node
,
9398 fold_convert_loc (loc
,
9403 /* If the first arg is "", and the length is greater than zero,
9404 return -*(const unsigned char*)arg2. */
9405 if (p1
&& *p1
== '\0'
9406 && TREE_CODE (len
) == INTEGER_CST
9407 && tree_int_cst_sgn (len
) == 1)
9409 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9410 tree cst_uchar_ptr_node
9411 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9413 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9414 build1 (INDIRECT_REF
, cst_uchar_node
,
9415 fold_convert_loc (loc
,
9418 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9421 /* If len parameter is one, return an expression corresponding to
9422 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9423 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
9425 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9426 tree cst_uchar_ptr_node
9427 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9429 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9430 build1 (INDIRECT_REF
, cst_uchar_node
,
9431 fold_convert_loc (loc
,
9434 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9435 build1 (INDIRECT_REF
, cst_uchar_node
,
9436 fold_convert_loc (loc
,
9439 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9445 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9446 ARG. Return NULL_TREE if no simplification can be made. */
9449 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9451 if (!validate_arg (arg
, REAL_TYPE
))
9454 /* If ARG is a compile-time constant, determine the result. */
9455 if (TREE_CODE (arg
) == REAL_CST
9456 && !TREE_OVERFLOW (arg
))
9460 c
= TREE_REAL_CST (arg
);
9461 return (REAL_VALUE_NEGATIVE (c
)
9462 ? build_one_cst (type
)
9463 : build_zero_cst (type
));
9466 /* If ARG is non-negative, the result is always zero. */
9467 if (tree_expr_nonnegative_p (arg
))
9468 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9470 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9471 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9472 return fold_convert (type
,
9473 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9474 build_real (TREE_TYPE (arg
), dconst0
)));
9479 /* Fold function call to builtin copysign, copysignf or copysignl with
9480 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9484 fold_builtin_copysign (location_t loc
, tree fndecl
,
9485 tree arg1
, tree arg2
, tree type
)
9489 if (!validate_arg (arg1
, REAL_TYPE
)
9490 || !validate_arg (arg2
, REAL_TYPE
))
9493 /* copysign(X,X) is X. */
9494 if (operand_equal_p (arg1
, arg2
, 0))
9495 return fold_convert_loc (loc
, type
, arg1
);
9497 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9498 if (TREE_CODE (arg1
) == REAL_CST
9499 && TREE_CODE (arg2
) == REAL_CST
9500 && !TREE_OVERFLOW (arg1
)
9501 && !TREE_OVERFLOW (arg2
))
9503 REAL_VALUE_TYPE c1
, c2
;
9505 c1
= TREE_REAL_CST (arg1
);
9506 c2
= TREE_REAL_CST (arg2
);
9507 /* c1.sign := c2.sign. */
9508 real_copysign (&c1
, &c2
);
9509 return build_real (type
, c1
);
9512 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9513 Remember to evaluate Y for side-effects. */
9514 if (tree_expr_nonnegative_p (arg2
))
9515 return omit_one_operand_loc (loc
, type
,
9516 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9519 /* Strip sign changing operations for the first argument. */
9520 tem
= fold_strip_sign_ops (arg1
);
9522 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9527 /* Fold a call to builtin isascii with argument ARG. */
9530 fold_builtin_isascii (location_t loc
, tree arg
)
9532 if (!validate_arg (arg
, INTEGER_TYPE
))
9536 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9537 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9538 build_int_cst (integer_type_node
,
9539 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9540 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9541 arg
, integer_zero_node
);
9545 /* Fold a call to builtin toascii with argument ARG. */
9548 fold_builtin_toascii (location_t loc
, tree arg
)
9550 if (!validate_arg (arg
, INTEGER_TYPE
))
9553 /* Transform toascii(c) -> (c & 0x7f). */
9554 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9555 build_int_cst (integer_type_node
, 0x7f));
9558 /* Fold a call to builtin isdigit with argument ARG. */
9561 fold_builtin_isdigit (location_t loc
, tree arg
)
9563 if (!validate_arg (arg
, INTEGER_TYPE
))
9567 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9568 /* According to the C standard, isdigit is unaffected by locale.
9569 However, it definitely is affected by the target character set. */
9570 unsigned HOST_WIDE_INT target_digit0
9571 = lang_hooks
.to_target_charset ('0');
9573 if (target_digit0
== 0)
9576 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9577 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9578 build_int_cst (unsigned_type_node
, target_digit0
));
9579 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9580 build_int_cst (unsigned_type_node
, 9));
9584 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9587 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9589 if (!validate_arg (arg
, REAL_TYPE
))
9592 arg
= fold_convert_loc (loc
, type
, arg
);
9593 if (TREE_CODE (arg
) == REAL_CST
)
9594 return fold_abs_const (arg
, type
);
9595 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9598 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9601 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9603 if (!validate_arg (arg
, INTEGER_TYPE
))
9606 arg
= fold_convert_loc (loc
, type
, arg
);
9607 if (TREE_CODE (arg
) == INTEGER_CST
)
9608 return fold_abs_const (arg
, type
);
9609 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9612 /* Fold a fma operation with arguments ARG[012]. */
9615 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9616 tree type
, tree arg0
, tree arg1
, tree arg2
)
9618 if (TREE_CODE (arg0
) == REAL_CST
9619 && TREE_CODE (arg1
) == REAL_CST
9620 && TREE_CODE (arg2
) == REAL_CST
)
9621 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9626 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9629 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9631 if (validate_arg (arg0
, REAL_TYPE
)
9632 && validate_arg (arg1
, REAL_TYPE
)
9633 && validate_arg (arg2
, REAL_TYPE
))
9635 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9639 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9640 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9641 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9646 /* Fold a call to builtin fmin or fmax. */
9649 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9650 tree type
, bool max
)
9652 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9654 /* Calculate the result when the argument is a constant. */
9655 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9660 /* If either argument is NaN, return the other one. Avoid the
9661 transformation if we get (and honor) a signalling NaN. Using
9662 omit_one_operand() ensures we create a non-lvalue. */
9663 if (TREE_CODE (arg0
) == REAL_CST
9664 && real_isnan (&TREE_REAL_CST (arg0
))
9665 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9666 || ! TREE_REAL_CST (arg0
).signalling
))
9667 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9668 if (TREE_CODE (arg1
) == REAL_CST
9669 && real_isnan (&TREE_REAL_CST (arg1
))
9670 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9671 || ! TREE_REAL_CST (arg1
).signalling
))
9672 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9674 /* Transform fmin/fmax(x,x) -> x. */
9675 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9676 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9678 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9679 functions to return the numeric arg if the other one is NaN.
9680 These tree codes don't honor that, so only transform if
9681 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9682 handled, so we don't have to worry about it either. */
9683 if (flag_finite_math_only
)
9684 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9685 fold_convert_loc (loc
, type
, arg0
),
9686 fold_convert_loc (loc
, type
, arg1
));
9691 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9694 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9696 if (validate_arg (arg
, COMPLEX_TYPE
)
9697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9699 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9703 tree new_arg
= builtin_save_expr (arg
);
9704 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9705 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9706 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9713 /* Fold a call to builtin logb/ilogb. */
9716 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9718 if (! validate_arg (arg
, REAL_TYPE
))
9723 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9725 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9731 /* If arg is Inf or NaN and we're logb, return it. */
9732 if (TREE_CODE (rettype
) == REAL_TYPE
)
9734 /* For logb(-Inf) we have to return +Inf. */
9735 if (real_isinf (value
) && real_isneg (value
))
9737 REAL_VALUE_TYPE tem
;
9739 return build_real (rettype
, tem
);
9741 return fold_convert_loc (loc
, rettype
, arg
);
9743 /* Fall through... */
9745 /* Zero may set errno and/or raise an exception for logb, also
9746 for ilogb we don't know FP_ILOGB0. */
9749 /* For normal numbers, proceed iff radix == 2. In GCC,
9750 normalized significands are in the range [0.5, 1.0). We
9751 want the exponent as if they were [1.0, 2.0) so get the
9752 exponent and subtract 1. */
9753 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9754 return fold_convert_loc (loc
, rettype
,
9755 build_int_cst (integer_type_node
,
9756 REAL_EXP (value
)-1));
9764 /* Fold a call to builtin significand, if radix == 2. */
9767 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9769 if (! validate_arg (arg
, REAL_TYPE
))
9774 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9776 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9783 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9784 return fold_convert_loc (loc
, rettype
, arg
);
9786 /* For normal numbers, proceed iff radix == 2. */
9787 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9789 REAL_VALUE_TYPE result
= *value
;
9790 /* In GCC, normalized significands are in the range [0.5,
9791 1.0). We want them to be [1.0, 2.0) so set the
9793 SET_REAL_EXP (&result
, 1);
9794 return build_real (rettype
, result
);
9803 /* Fold a call to builtin frexp, we can assume the base is 2. */
9806 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9808 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9813 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9816 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9818 /* Proceed if a valid pointer type was passed in. */
9819 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9821 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9827 /* For +-0, return (*exp = 0, +-0). */
9828 exp
= integer_zero_node
;
9833 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9834 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9837 /* Since the frexp function always expects base 2, and in
9838 GCC normalized significands are already in the range
9839 [0.5, 1.0), we have exactly what frexp wants. */
9840 REAL_VALUE_TYPE frac_rvt
= *value
;
9841 SET_REAL_EXP (&frac_rvt
, 0);
9842 frac
= build_real (rettype
, frac_rvt
);
9843 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9850 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9851 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9852 TREE_SIDE_EFFECTS (arg1
) = 1;
9853 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9859 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9860 then we can assume the base is two. If it's false, then we have to
9861 check the mode of the TYPE parameter in certain cases. */
9864 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9865 tree type
, bool ldexp
)
9867 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9872 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9873 if (real_zerop (arg0
) || integer_zerop (arg1
)
9874 || (TREE_CODE (arg0
) == REAL_CST
9875 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9876 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9878 /* If both arguments are constant, then try to evaluate it. */
9879 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9880 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9881 && tree_fits_shwi_p (arg1
))
9883 /* Bound the maximum adjustment to twice the range of the
9884 mode's valid exponents. Use abs to ensure the range is
9885 positive as a sanity check. */
9886 const long max_exp_adj
= 2 *
9887 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9888 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9890 /* Get the user-requested adjustment. */
9891 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
9893 /* The requested adjustment must be inside this range. This
9894 is a preliminary cap to avoid things like overflow, we
9895 may still fail to compute the result for other reasons. */
9896 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9898 REAL_VALUE_TYPE initial_result
;
9900 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9902 /* Ensure we didn't overflow. */
9903 if (! real_isinf (&initial_result
))
9905 const REAL_VALUE_TYPE trunc_result
9906 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9908 /* Only proceed if the target mode can hold the
9910 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9911 return build_real (type
, trunc_result
);
9920 /* Fold a call to builtin modf. */
9923 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9925 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9930 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9933 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9935 /* Proceed if a valid pointer type was passed in. */
9936 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9938 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9939 REAL_VALUE_TYPE trunc
, frac
;
9945 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9946 trunc
= frac
= *value
;
9949 /* For +-Inf, return (*arg1 = arg0, +-0). */
9951 frac
.sign
= value
->sign
;
9955 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9956 real_trunc (&trunc
, VOIDmode
, value
);
9957 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9958 /* If the original number was negative and already
9959 integral, then the fractional part is -0.0. */
9960 if (value
->sign
&& frac
.cl
== rvc_zero
)
9961 frac
.sign
= value
->sign
;
9965 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9966 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9967 build_real (rettype
, trunc
));
9968 TREE_SIDE_EFFECTS (arg1
) = 1;
9969 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9970 build_real (rettype
, frac
));
9976 /* Given a location LOC, an interclass builtin function decl FNDECL
9977 and its single argument ARG, return an folded expression computing
9978 the same, or NULL_TREE if we either couldn't or didn't want to fold
9979 (the latter happen if there's an RTL instruction available). */
/* NOTE(review): this chunk appears elided by extraction -- the return
   type, braces, local declarations (buf, r, result) and several
   statements from the original are missing; only comments were added.  */
9982 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9984 enum machine_mode mode
;
/* Only fold when the single argument really is of REAL_TYPE.  */
9986 if (!validate_arg (arg
, REAL_TYPE
))
/* If an RTL icode exists for this interclass builtin, prefer the RTL
   expansion over folding -- presumably we return NULL_TREE here;
   confirm against the unelided original.  */
9989 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9992 mode
= TYPE_MODE (TREE_TYPE (arg
))
9994 /* If there is no optab, try generic code. */
9995 switch (DECL_FUNCTION_CODE (fndecl
))
9999 CASE_FLT_FN (BUILT_IN_ISINF
):
10001 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10002 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
10003 tree
const type
= TREE_TYPE (arg
);
/* Build MODE's largest finite value as text, parse it into r, and
   compare fabs(arg) against it: greater implies infinite.  */
10007 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10008 real_from_string (&r
, buf
);
10009 result
= build_call_expr (isgr_fn
, 2,
10010 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
10011 build_real (type
, r
));
10014 CASE_FLT_FN (BUILT_IN_FINITE
):
10015 case BUILT_IN_ISFINITE
:
10017 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10018 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
10019 tree
const type
= TREE_TYPE (arg
);
10023 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10024 real_from_string (&r
, buf
);
10025 result
= build_call_expr (isle_fn
, 2,
10026 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
10027 build_real (type
, r
));
10028 /*result = fold_build2_loc (loc, UNGT_EXPR,
10029 TREE_TYPE (TREE_TYPE (fndecl)),
10030 fold_build1_loc (loc, ABS_EXPR, type, arg),
10031 build_real (type, r));
10032 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10033 TREE_TYPE (TREE_TYPE (fndecl)),
10037 case BUILT_IN_ISNORMAL
:
10039 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10040 islessequal(fabs(x),DBL_MAX). */
10041 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
10042 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
10043 tree
const type
= TREE_TYPE (arg
);
10044 REAL_VALUE_TYPE rmax
, rmin
;
/* rmax = largest finite value of MODE; rmin = smallest positive
   normal, written as the hex-float string "0x1p(emin-1)".  */
10047 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
10048 real_from_string (&rmax
, buf
);
10049 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10050 real_from_string (&rmin
, buf
);
/* SAVE_EXPR the fabs(arg): it is consumed by both comparisons.  */
10051 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10052 result
= build_call_expr (isle_fn
, 2, arg
,
10053 build_real (type
, rmax
));
/* BIT_AND_EXPR (not TRUTH_ANDIF) so both range tests are evaluated
   unconditionally on the saved operand.  */
10054 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
10055 build_call_expr (isge_fn
, 2, arg
,
10056 build_real (type
, rmin
)));
10066 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10067 ARG is the argument for the call. */
/* NOTE(review): chunk appears elided by extraction -- return type,
   braces, the local REAL_VALUE_TYPE r, break statements and a default
   label are missing here; only comments were added.  BUILTIN_INDEX
   selects which classification to fold.  */
10070 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
10072 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10075 if (!validate_arg (arg
, REAL_TYPE
))
10078 switch (builtin_index
)
10080 case BUILT_IN_ISINF
:
/* If the mode has no infinities, isinf(x) is statically 0 (but keep
   ARG for its side effects via omit_one_operand).  */
10081 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10082 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10084 if (TREE_CODE (arg
) == REAL_CST
)
10086 r
= TREE_REAL_CST (arg
);
/* Constant argument: fold to +1 for +Inf, -1 for -Inf, 0 otherwise.  */
10087 if (real_isinf (&r
))
10088 return real_compare (GT_EXPR
, &r
, &dconst0
)
10089 ? integer_one_node
: integer_minus_one_node
;
10091 return integer_zero_node
;
10096 case BUILT_IN_ISINF_SIGN
:
10098 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10099 /* In a boolean context, GCC will fold the inner COND_EXPR to
10100 1. So e.g. "if (isinf_sign(x))" would be folded to just
10101 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10102 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
10103 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
10104 tree tmp
= NULL_TREE
;
/* ARG is used twice (signbit and isinf calls); save it once.  */
10106 arg
= builtin_save_expr (arg
);
10108 if (signbit_fn
&& isinf_fn
)
10110 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
10111 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
/* Normalize both library results to strict 0/1 booleans.  */
10113 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10114 signbit_call
, integer_zero_node
);
10115 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
10116 isinf_call
, integer_zero_node
);
10118 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
10119 integer_minus_one_node
, integer_one_node
);
10120 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10122 integer_zero_node
);
10128 case BUILT_IN_ISFINITE
:
/* No NaNs and no infinities: every value is finite, fold to 1.  */
10129 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
10130 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
10131 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
10133 if (TREE_CODE (arg
) == REAL_CST
)
10135 r
= TREE_REAL_CST (arg
);
10136 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
10141 case BUILT_IN_ISNAN
:
10142 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
10143 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
10145 if (TREE_CODE (arg
) == REAL_CST
)
10147 r
= TREE_REAL_CST (arg
);
10148 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
/* Generic isnan: x != x, expressed as UNORDERED (x, x) on the saved
   operand so ARG is evaluated only once.  */
10151 arg
= builtin_save_expr (arg
);
10152 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
10155 gcc_unreachable ();
10159 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10160 This builtin will generate code to return the appropriate floating
10161 point classification depending on the value of the floating point
10162 number passed in. The possible return values must be supplied as
10163 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10164 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10165 one floating point argument which is "type generic". */
/* NOTE(review): chunk appears elided by extraction -- return type,
   braces, the locals buf/r, get_max_float for the infinity test and
   the final return are missing; only comments were added.  */
10168 fold_builtin_fpclassify (location_t loc
, tree exp
)
10170 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
10171 arg
, type
, res
, tmp
;
10172 enum machine_mode mode
;
10176 /* Verify the required arguments in the original call. */
10177 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10178 INTEGER_TYPE
, INTEGER_TYPE
,
10179 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
/* The five int arguments are the caller-supplied classification
   values, in the documented FP_* order; the sixth is the value.  */
10182 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10183 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10184 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10185 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10186 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10187 arg
= CALL_EXPR_ARG (exp
, 5);
10188 type
= TREE_TYPE (arg
);
10189 mode
= TYPE_MODE (type
);
/* Work on a saved fabs(arg): every test below is on the magnitude.  */
10190 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10192 /* fpclassify(x) ->
10193 isnan(x) ? FP_NAN :
10194 (fabs(x) == Inf ? FP_INFINITE :
10195 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10196 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* The COND_EXPR chain is built inside-out, innermost test first.  */
10198 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10199 build_real (type
, dconst0
));
10200 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10201 tmp
, fp_zero
, fp_subnormal
);
/* Smallest positive normal of MODE, as the hex-float "0x1p(emin-1)".  */
10203 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10204 real_from_string (&r
, buf
);
10205 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10206 arg
, build_real (type
, r
));
10207 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
/* Only emit the infinity test when the mode honors infinities.  */
10209 if (HONOR_INFINITIES (mode
))
10212 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10213 build_real (type
, r
));
10214 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
/* Likewise the NaN test: ORDERED (arg, arg) is false only for NaN.  */
10218 if (HONOR_NANS (mode
))
10220 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10221 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10227 /* Fold a call to an unordered comparison function such as
10228 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10229 being called and ARG0 and ARG1 are the arguments for the call.
10230 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10231 the opposite of the desired result. UNORDERED_CODE is used
10232 for modes that can hold NaNs and ORDERED_CODE is used for
/* NOTE(review): the comment above and parts of the body are cut by
   extraction (return type, braces, type0/type1 declarations, error
   branches); only comments were added.  */
10236 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10237 enum tree_code unordered_code
,
10238 enum tree_code ordered_code
)
10240 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10241 enum tree_code code
;
10243 enum tree_code code0
, code1
;
10244 tree cmp_type
= NULL_TREE
;
10246 type0
= TREE_TYPE (arg0
);
10247 type1
= TREE_TYPE (arg1
);
10249 code0
= TREE_CODE (type0
);
10250 code1
= TREE_CODE (type1
);
10252 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10253 /* Choose the wider of two real types. */
10254 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
/* Mixed real/integer operands: the real type is the comparison type
   (the assignments themselves are elided here).  */
10256 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10258 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
/* Bring both operands to the common comparison type.  */
10261 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10262 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10264 if (unordered_code
== UNORDERED_EXPR
)
/* isunordered: statically 0 when the mode cannot hold NaNs.  */
10266 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10267 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10268 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
/* The callers pass the OPPOSITE comparison, so the result is the
   logical negation of that comparison.  */
10271 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10273 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10274 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10277 /* Fold a call to built-in function FNDECL with 0 arguments.
10278 IGNORE is true if the result of the function call is ignored. This
10279 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): chunk elided by extraction -- return type, braces,
   the switch head on fcode, break/default and the trailing
   return NULL_TREE are missing; only comments were added.  */
10282 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10284 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10285 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
/* inf()/infd* fold to an infinity constant; the bool selects whether
   to warn when the target lacks infinities (true) or not (false).  */
10288 CASE_FLT_FN (BUILT_IN_INF
):
10289 case BUILT_IN_INFD32
:
10290 case BUILT_IN_INFD64
:
10291 case BUILT_IN_INFD128
:
10292 return fold_builtin_inf (loc
, type
, true);
10294 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10295 return fold_builtin_inf (loc
, type
, false);
10297 case BUILT_IN_CLASSIFY_TYPE
:
10298 return fold_builtin_classify_type (NULL_TREE
);
10300 case BUILT_IN_UNREACHABLE
:
/* Under -fsanitize=unreachable, replace __builtin_unreachable with
   a ubsan diagnostic call, unless the current function opts out via
   the no_sanitize_undefined attribute.  */
10301 if (flag_sanitize
& SANITIZE_UNREACHABLE
10302 && (current_function_decl
== NULL
10303 || !lookup_attribute ("no_sanitize_undefined",
10304 DECL_ATTRIBUTES (current_function_decl
))))
10305 return ubsan_instrument_unreachable (loc
);
10314 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10315 IGNORE is true if the result of the function call is ignored. This
10316 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): chunk elided by extraction -- return type, braces,
   the switch head on fcode, most break statements, some returns and
   the default/fallthrough tail are missing; only comments were added.
   The structure is one big dispatch on the builtin's function code,
   each case delegating to a dedicated fold helper or to the MPFR/MPC
   constant-folding routines (do_mpfr_arg1/do_mpc_arg1).  */
10319 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10321 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10322 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10325 case BUILT_IN_CONSTANT_P
:
10327 tree val
= fold_builtin_constant_p (arg0
);
10329 /* Gimplification will pull the CALL_EXPR for the builtin out of
10330 an if condition. When not optimizing, we'll not CSE it back.
10331 To avoid link error types of regressions, return false now. */
10332 if (!val
&& !optimize
)
10333 val
= integer_zero_node
;
10338 case BUILT_IN_CLASSIFY_TYPE
:
10339 return fold_builtin_classify_type (arg0
);
10341 case BUILT_IN_STRLEN
:
10342 return fold_builtin_strlen (loc
, type
, arg0
);
10344 CASE_FLT_FN (BUILT_IN_FABS
):
10345 case BUILT_IN_FABSD32
:
10346 case BUILT_IN_FABSD64
:
10347 case BUILT_IN_FABSD128
:
10348 return fold_builtin_fabs (loc
, arg0
, type
);
10351 case BUILT_IN_LABS
:
10352 case BUILT_IN_LLABS
:
10353 case BUILT_IN_IMAXABS
:
10354 return fold_builtin_abs (loc
, arg0
, type
);
/* Complex-valued math builtins: each requires a COMPLEX_TYPE whose
   element type is REAL_TYPE before folding.  */
10356 CASE_FLT_FN (BUILT_IN_CONJ
):
10357 if (validate_arg (arg0
, COMPLEX_TYPE
)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10359 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10362 CASE_FLT_FN (BUILT_IN_CREAL
):
10363 if (validate_arg (arg0
, COMPLEX_TYPE
)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10365 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10368 CASE_FLT_FN (BUILT_IN_CIMAG
):
10369 if (validate_arg (arg0
, COMPLEX_TYPE
)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10371 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10374 CASE_FLT_FN (BUILT_IN_CCOS
):
10375 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10377 CASE_FLT_FN (BUILT_IN_CCOSH
):
10378 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10380 CASE_FLT_FN (BUILT_IN_CPROJ
):
10381 return fold_builtin_cproj (loc
, arg0
, type
);
/* The csin..catanh family folds constant complex arguments through
   the MPC library (do_mpc_arg1 with the matching mpc_* function).  */
10383 CASE_FLT_FN (BUILT_IN_CSIN
):
10384 if (validate_arg (arg0
, COMPLEX_TYPE
)
10385 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10386 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10389 CASE_FLT_FN (BUILT_IN_CSINH
):
10390 if (validate_arg (arg0
, COMPLEX_TYPE
)
10391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10392 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10395 CASE_FLT_FN (BUILT_IN_CTAN
):
10396 if (validate_arg (arg0
, COMPLEX_TYPE
)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10398 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10401 CASE_FLT_FN (BUILT_IN_CTANH
):
10402 if (validate_arg (arg0
, COMPLEX_TYPE
)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10404 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10407 CASE_FLT_FN (BUILT_IN_CLOG
):
10408 if (validate_arg (arg0
, COMPLEX_TYPE
)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10410 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10413 CASE_FLT_FN (BUILT_IN_CSQRT
):
10414 if (validate_arg (arg0
, COMPLEX_TYPE
)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10416 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10419 CASE_FLT_FN (BUILT_IN_CASIN
):
10420 if (validate_arg (arg0
, COMPLEX_TYPE
)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10422 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10425 CASE_FLT_FN (BUILT_IN_CACOS
):
10426 if (validate_arg (arg0
, COMPLEX_TYPE
)
10427 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10428 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10431 CASE_FLT_FN (BUILT_IN_CATAN
):
10432 if (validate_arg (arg0
, COMPLEX_TYPE
)
10433 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10434 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10437 CASE_FLT_FN (BUILT_IN_CASINH
):
10438 if (validate_arg (arg0
, COMPLEX_TYPE
)
10439 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10440 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10443 CASE_FLT_FN (BUILT_IN_CACOSH
):
10444 if (validate_arg (arg0
, COMPLEX_TYPE
)
10445 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10446 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10449 CASE_FLT_FN (BUILT_IN_CATANH
):
10450 if (validate_arg (arg0
, COMPLEX_TYPE
)
10451 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10452 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10455 CASE_FLT_FN (BUILT_IN_CABS
):
10456 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10458 CASE_FLT_FN (BUILT_IN_CARG
):
10459 return fold_builtin_carg (loc
, arg0
, type
);
10461 CASE_FLT_FN (BUILT_IN_SQRT
):
10462 return fold_builtin_sqrt (loc
, arg0
, type
);
10464 CASE_FLT_FN (BUILT_IN_CBRT
):
10465 return fold_builtin_cbrt (loc
, arg0
, type
);
/* Real math functions: fold constants via MPFR (do_mpfr_arg1); the
   trailing &dconst arguments bound the mathematically valid domain,
   and the final bool selects inclusive endpoints.  */
10467 CASE_FLT_FN (BUILT_IN_ASIN
):
10468 if (validate_arg (arg0
, REAL_TYPE
))
10469 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10470 &dconstm1
, &dconst1
, true);
10473 CASE_FLT_FN (BUILT_IN_ACOS
):
10474 if (validate_arg (arg0
, REAL_TYPE
))
10475 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10476 &dconstm1
, &dconst1
, true);
10479 CASE_FLT_FN (BUILT_IN_ATAN
):
10480 if (validate_arg (arg0
, REAL_TYPE
))
10481 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10484 CASE_FLT_FN (BUILT_IN_ASINH
):
10485 if (validate_arg (arg0
, REAL_TYPE
))
10486 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10489 CASE_FLT_FN (BUILT_IN_ACOSH
):
10490 if (validate_arg (arg0
, REAL_TYPE
))
10491 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10492 &dconst1
, NULL
, true);
10495 CASE_FLT_FN (BUILT_IN_ATANH
):
10496 if (validate_arg (arg0
, REAL_TYPE
))
10497 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10498 &dconstm1
, &dconst1
, false);
10501 CASE_FLT_FN (BUILT_IN_SIN
):
10502 if (validate_arg (arg0
, REAL_TYPE
))
10503 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10506 CASE_FLT_FN (BUILT_IN_COS
):
10507 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10509 CASE_FLT_FN (BUILT_IN_TAN
):
10510 return fold_builtin_tan (arg0
, type
);
10512 CASE_FLT_FN (BUILT_IN_CEXP
):
10513 return fold_builtin_cexp (loc
, arg0
, type
);
10515 CASE_FLT_FN (BUILT_IN_CEXPI
):
10516 if (validate_arg (arg0
, REAL_TYPE
))
10517 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10520 CASE_FLT_FN (BUILT_IN_SINH
):
10521 if (validate_arg (arg0
, REAL_TYPE
))
10522 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10525 CASE_FLT_FN (BUILT_IN_COSH
):
10526 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10528 CASE_FLT_FN (BUILT_IN_TANH
):
10529 if (validate_arg (arg0
, REAL_TYPE
))
10530 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10533 CASE_FLT_FN (BUILT_IN_ERF
):
10534 if (validate_arg (arg0
, REAL_TYPE
))
10535 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10538 CASE_FLT_FN (BUILT_IN_ERFC
):
10539 if (validate_arg (arg0
, REAL_TYPE
))
10540 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10543 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10544 if (validate_arg (arg0
, REAL_TYPE
))
10545 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10548 CASE_FLT_FN (BUILT_IN_EXP
):
10549 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10551 CASE_FLT_FN (BUILT_IN_EXP2
):
10552 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10554 CASE_FLT_FN (BUILT_IN_EXP10
):
10555 CASE_FLT_FN (BUILT_IN_POW10
):
10556 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10558 CASE_FLT_FN (BUILT_IN_EXPM1
):
10559 if (validate_arg (arg0
, REAL_TYPE
))
10560 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10563 CASE_FLT_FN (BUILT_IN_LOG
):
10564 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10566 CASE_FLT_FN (BUILT_IN_LOG2
):
10567 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10569 CASE_FLT_FN (BUILT_IN_LOG10
):
10570 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10572 CASE_FLT_FN (BUILT_IN_LOG1P
):
10573 if (validate_arg (arg0
, REAL_TYPE
))
10574 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10575 &dconstm1
, NULL
, false);
/* Bessel functions (trailing domain arguments partly elided here).  */
10578 CASE_FLT_FN (BUILT_IN_J0
):
10579 if (validate_arg (arg0
, REAL_TYPE
))
10580 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10584 CASE_FLT_FN (BUILT_IN_J1
):
10585 if (validate_arg (arg0
, REAL_TYPE
))
10586 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10590 CASE_FLT_FN (BUILT_IN_Y0
):
10591 if (validate_arg (arg0
, REAL_TYPE
))
10592 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10593 &dconst0
, NULL
, false);
10596 CASE_FLT_FN (BUILT_IN_Y1
):
10597 if (validate_arg (arg0
, REAL_TYPE
))
10598 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10599 &dconst0
, NULL
, false);
/* nan()/nans(): the bool distinguishes quiet (true) from signalling
   (false) NaN construction.  */
10602 CASE_FLT_FN (BUILT_IN_NAN
):
10603 case BUILT_IN_NAND32
:
10604 case BUILT_IN_NAND64
:
10605 case BUILT_IN_NAND128
:
10606 return fold_builtin_nan (arg0
, type
, true);
10608 CASE_FLT_FN (BUILT_IN_NANS
):
10609 return fold_builtin_nan (arg0
, type
, false);
10611 CASE_FLT_FN (BUILT_IN_FLOOR
):
10612 return fold_builtin_floor (loc
, fndecl
, arg0
);
10614 CASE_FLT_FN (BUILT_IN_CEIL
):
10615 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10617 CASE_FLT_FN (BUILT_IN_TRUNC
):
10618 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10620 CASE_FLT_FN (BUILT_IN_ROUND
):
10621 return fold_builtin_round (loc
, fndecl
, arg0
);
10623 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10624 CASE_FLT_FN (BUILT_IN_RINT
):
10625 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10627 CASE_FLT_FN (BUILT_IN_ICEIL
):
10628 CASE_FLT_FN (BUILT_IN_LCEIL
):
10629 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10630 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10631 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10632 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10633 CASE_FLT_FN (BUILT_IN_IROUND
):
10634 CASE_FLT_FN (BUILT_IN_LROUND
):
10635 CASE_FLT_FN (BUILT_IN_LLROUND
):
10636 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10638 CASE_FLT_FN (BUILT_IN_IRINT
):
10639 CASE_FLT_FN (BUILT_IN_LRINT
):
10640 CASE_FLT_FN (BUILT_IN_LLRINT
):
10641 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10643 case BUILT_IN_BSWAP16
:
10644 case BUILT_IN_BSWAP32
:
10645 case BUILT_IN_BSWAP64
:
10646 return fold_builtin_bswap (fndecl
, arg0
);
10648 CASE_INT_FN (BUILT_IN_FFS
):
10649 CASE_INT_FN (BUILT_IN_CLZ
):
10650 CASE_INT_FN (BUILT_IN_CTZ
):
10651 CASE_INT_FN (BUILT_IN_CLRSB
):
10652 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10653 CASE_INT_FN (BUILT_IN_PARITY
):
10654 return fold_builtin_bitop (fndecl
, arg0
);
10656 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10657 return fold_builtin_signbit (loc
, arg0
, type
);
10659 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10660 return fold_builtin_significand (loc
, arg0
, type
);
10662 CASE_FLT_FN (BUILT_IN_ILOGB
):
10663 CASE_FLT_FN (BUILT_IN_LOGB
):
10664 return fold_builtin_logb (loc
, arg0
, type
);
10666 case BUILT_IN_ISASCII
:
10667 return fold_builtin_isascii (loc
, arg0
);
10669 case BUILT_IN_TOASCII
:
10670 return fold_builtin_toascii (loc
, arg0
);
10672 case BUILT_IN_ISDIGIT
:
10673 return fold_builtin_isdigit (loc
, arg0
);
/* Classification builtins: try the generic classify fold first; when
   it declines (ret handling elided here), fall back to the
   interclass-comparison expansion.  */
10675 CASE_FLT_FN (BUILT_IN_FINITE
):
10676 case BUILT_IN_FINITED32
:
10677 case BUILT_IN_FINITED64
:
10678 case BUILT_IN_FINITED128
:
10679 case BUILT_IN_ISFINITE
:
10681 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10684 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10687 CASE_FLT_FN (BUILT_IN_ISINF
):
10688 case BUILT_IN_ISINFD32
:
10689 case BUILT_IN_ISINFD64
:
10690 case BUILT_IN_ISINFD128
:
10692 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10695 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10698 case BUILT_IN_ISNORMAL
:
10699 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10701 case BUILT_IN_ISINF_SIGN
:
10702 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10704 CASE_FLT_FN (BUILT_IN_ISNAN
):
10705 case BUILT_IN_ISNAND32
:
10706 case BUILT_IN_ISNAND64
:
10707 case BUILT_IN_ISNAND128
:
10708 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10710 case BUILT_IN_PRINTF
:
10711 case BUILT_IN_PRINTF_UNLOCKED
:
10712 case BUILT_IN_VPRINTF
:
10713 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10715 case BUILT_IN_FREE
:
/* free(NULL) is a no-op; drop the call entirely.  */
10716 if (integer_zerop (arg0
))
10717 return build_empty_stmt (loc
);
10728 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10729 IGNORE is true if the result of the function call is ignored. This
10730 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): chunk elided by extraction -- return type, braces,
   the switch head on fcode, breaks and several argument tails are
   missing; only comments were added.  Dispatch per builtin code to
   the matching two-argument fold helper or MPFR/MPC routine.  */
10733 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10735 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10736 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
/* jn/yn take an integer order plus a real argument.  */
10740 CASE_FLT_FN (BUILT_IN_JN
):
10741 if (validate_arg (arg0
, INTEGER_TYPE
)
10742 && validate_arg (arg1
, REAL_TYPE
))
10743 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10746 CASE_FLT_FN (BUILT_IN_YN
):
10747 if (validate_arg (arg0
, INTEGER_TYPE
)
10748 && validate_arg (arg1
, REAL_TYPE
))
10749 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10753 CASE_FLT_FN (BUILT_IN_DREM
):
10754 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10755 if (validate_arg (arg0
, REAL_TYPE
)
10756 && validate_arg (arg1
, REAL_TYPE
))
10757 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10760 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10761 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10762 if (validate_arg (arg0
, REAL_TYPE
)
10763 && validate_arg (arg1
, POINTER_TYPE
))
10764 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10767 CASE_FLT_FN (BUILT_IN_ATAN2
):
10768 if (validate_arg (arg0
, REAL_TYPE
)
10769 && validate_arg (arg1
, REAL_TYPE
))
10770 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10773 CASE_FLT_FN (BUILT_IN_FDIM
):
10774 if (validate_arg (arg0
, REAL_TYPE
)
10775 && validate_arg (arg1
, REAL_TYPE
))
10776 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10779 CASE_FLT_FN (BUILT_IN_HYPOT
):
10780 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10782 CASE_FLT_FN (BUILT_IN_CPOW
):
10783 if (validate_arg (arg0
, COMPLEX_TYPE
)
10784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10785 && validate_arg (arg1
, COMPLEX_TYPE
)
10786 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10787 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
/* ldexp and scalbn/scalbln share one helper; the bool says whether
   the exponent is a power of 2 (ldexp) or of FLT_RADIX (scalbn).  */
10790 CASE_FLT_FN (BUILT_IN_LDEXP
):
10791 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10792 CASE_FLT_FN (BUILT_IN_SCALBN
):
10793 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10794 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10795 type
, /*ldexp=*/false);
10797 CASE_FLT_FN (BUILT_IN_FREXP
):
10798 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10800 CASE_FLT_FN (BUILT_IN_MODF
):
10801 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10803 case BUILT_IN_BZERO
:
10804 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10806 case BUILT_IN_FPUTS
:
10807 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10809 case BUILT_IN_FPUTS_UNLOCKED
:
10810 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10812 case BUILT_IN_STRSTR
:
10813 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10815 case BUILT_IN_STRCAT
:
10816 return fold_builtin_strcat (loc
, arg0
, arg1
, NULL_TREE
);
10818 case BUILT_IN_STRSPN
:
10819 return fold_builtin_strspn (loc
, arg0
, arg1
);
10821 case BUILT_IN_STRCSPN
:
10822 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10824 case BUILT_IN_STRCHR
:
10825 case BUILT_IN_INDEX
:
10826 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10828 case BUILT_IN_STRRCHR
:
10829 case BUILT_IN_RINDEX
:
10830 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10832 case BUILT_IN_STRCPY
:
10833 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10835 case BUILT_IN_STPCPY
:
/* When the result is unused (condition elided here), stpcpy can be
   downgraded to a plain strcpy call.  */
10838 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10842 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10845 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10848 case BUILT_IN_STRCMP
:
10849 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10851 case BUILT_IN_STRPBRK
:
10852 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10854 case BUILT_IN_EXPECT
:
10855 return fold_builtin_expect (loc
, arg0
, arg1
);
10857 CASE_FLT_FN (BUILT_IN_POW
):
10858 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10860 CASE_FLT_FN (BUILT_IN_POWI
):
10861 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10863 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10864 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10866 CASE_FLT_FN (BUILT_IN_FMIN
):
10867 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10869 CASE_FLT_FN (BUILT_IN_FMAX
):
10870 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
/* Unordered comparisons pass the OPPOSITE tree codes; the helper
   negates them (see fold_builtin_unordered_cmp).  */
10872 case BUILT_IN_ISGREATER
:
10873 return fold_builtin_unordered_cmp (loc
, fndecl
,
10874 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10875 case BUILT_IN_ISGREATEREQUAL
:
10876 return fold_builtin_unordered_cmp (loc
, fndecl
,
10877 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10878 case BUILT_IN_ISLESS
:
10879 return fold_builtin_unordered_cmp (loc
, fndecl
,
10880 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10881 case BUILT_IN_ISLESSEQUAL
:
10882 return fold_builtin_unordered_cmp (loc
, fndecl
,
10883 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10884 case BUILT_IN_ISLESSGREATER
:
10885 return fold_builtin_unordered_cmp (loc
, fndecl
,
10886 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10887 case BUILT_IN_ISUNORDERED
:
10888 return fold_builtin_unordered_cmp (loc
, fndecl
,
10889 arg0
, arg1
, UNORDERED_EXPR
,
10892 /* We do the folding for va_start in the expander. */
10893 case BUILT_IN_VA_START
:
10896 case BUILT_IN_SPRINTF
:
10897 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10899 case BUILT_IN_OBJECT_SIZE
:
10900 return fold_builtin_object_size (arg0
, arg1
);
10902 case BUILT_IN_PRINTF
:
10903 case BUILT_IN_PRINTF_UNLOCKED
:
10904 case BUILT_IN_VPRINTF
:
10905 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10907 case BUILT_IN_PRINTF_CHK
:
10908 case BUILT_IN_VPRINTF_CHK
:
/* The _chk variants carry a leading flag argument; fold only when it
   is a side-effect-free integer we may safely discard.  */
10909 if (!validate_arg (arg0
, INTEGER_TYPE
)
10910 || TREE_SIDE_EFFECTS (arg0
))
10913 return fold_builtin_printf (loc
, fndecl
,
10914 arg1
, NULL_TREE
, ignore
, fcode
);
10917 case BUILT_IN_FPRINTF
:
10918 case BUILT_IN_FPRINTF_UNLOCKED
:
10919 case BUILT_IN_VFPRINTF
:
10920 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10923 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10924 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10926 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10927 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10935 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10936 and ARG2. IGNORE is true if the result of the function call is ignored.
10937 This function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): chunk elided by extraction -- return type, braces,
   the switch head on fcode, breaks and some argument tails are
   missing; only comments were added.  */
10940 fold_builtin_3 (location_t loc
, tree fndecl
,
10941 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10943 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10944 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10948 CASE_FLT_FN (BUILT_IN_SINCOS
):
10949 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10951 CASE_FLT_FN (BUILT_IN_FMA
):
10952 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10955 CASE_FLT_FN (BUILT_IN_REMQUO
):
10956 if (validate_arg (arg0
, REAL_TYPE
)
10957 && validate_arg (arg1
, REAL_TYPE
)
10958 && validate_arg (arg2
, POINTER_TYPE
))
10959 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10962 case BUILT_IN_MEMSET
:
10963 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
/* bcopy(src, dst, n) has swapped operands relative to memmove; the
   shared helper is called with arg1/arg0 reversed and endp=3
   (memmove semantics).  */
10965 case BUILT_IN_BCOPY
:
10966 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10967 void_type_node
, true, /*endp=*/3);
10969 case BUILT_IN_MEMCPY
:
10970 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10971 type
, ignore
, /*endp=*/0);
10973 case BUILT_IN_MEMPCPY
:
10974 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10975 type
, ignore
, /*endp=*/1);
10977 case BUILT_IN_MEMMOVE
:
10978 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10979 type
, ignore
, /*endp=*/3);
10981 case BUILT_IN_STRNCAT
:
10982 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10984 case BUILT_IN_STRNCPY
:
10985 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10987 case BUILT_IN_STRNCMP
:
10988 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10990 case BUILT_IN_MEMCHR
:
10991 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10993 case BUILT_IN_BCMP
:
10994 case BUILT_IN_MEMCMP
:
10995 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10997 case BUILT_IN_SPRINTF
:
10998 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
11000 case BUILT_IN_SNPRINTF
:
11001 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
11003 case BUILT_IN_STRCPY_CHK
:
11004 case BUILT_IN_STPCPY_CHK
:
11005 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
11008 case BUILT_IN_STRCAT_CHK
:
11009 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
11011 case BUILT_IN_PRINTF_CHK
:
11012 case BUILT_IN_VPRINTF_CHK
:
/* Leading flag argument must be a side-effect-free integer before we
   may discard it and fold the remaining printf call.  */
11013 if (!validate_arg (arg0
, INTEGER_TYPE
)
11014 || TREE_SIDE_EFFECTS (arg0
))
11017 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
11020 case BUILT_IN_FPRINTF
:
11021 case BUILT_IN_FPRINTF_UNLOCKED
:
11022 case BUILT_IN_VFPRINTF
:
11023 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
11026 case BUILT_IN_FPRINTF_CHK
:
11027 case BUILT_IN_VFPRINTF_CHK
:
/* For fprintf_chk the flag is the SECOND argument (after the
   stream); same side-effect-free requirement as above.  */
11028 if (!validate_arg (arg1
, INTEGER_TYPE
)
11029 || TREE_SIDE_EFFECTS (arg1
))
11032 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
11041 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11042 ARG2, and ARG3. IGNORE is true if the result of the function call is
11043 ignored. This function returns NULL_TREE if no simplification was
11047 fold_builtin_4 (location_t loc
, tree fndecl
,
11048 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
11050 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11054 case BUILT_IN_MEMCPY_CHK
:
11055 case BUILT_IN_MEMPCPY_CHK
:
11056 case BUILT_IN_MEMMOVE_CHK
:
11057 case BUILT_IN_MEMSET_CHK
:
11058 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
11060 DECL_FUNCTION_CODE (fndecl
));
11062 case BUILT_IN_STRNCPY_CHK
:
11063 case BUILT_IN_STPNCPY_CHK
:
11064 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
11067 case BUILT_IN_STRNCAT_CHK
:
11068 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
11070 case BUILT_IN_SNPRINTF
:
11071 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
11073 case BUILT_IN_FPRINTF_CHK
:
11074 case BUILT_IN_VFPRINTF_CHK
:
11075 if (!validate_arg (arg1
, INTEGER_TYPE
)
11076 || TREE_SIDE_EFFECTS (arg1
))
11079 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
11089 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11090 arguments, where NARGS <= 4. IGNORE is true if the result of the
11091 function call is ignored. This function returns NULL_TREE if no
11092 simplification was possible. Note that this only folds builtins with
11093 fixed argument patterns. Foldings that do varargs-to-varargs
11094 transformations, or that match calls with more than 4 arguments,
11095 need to be handled with fold_builtin_varargs instead. */
11097 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11100 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
11102 tree ret
= NULL_TREE
;
11107 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
11110 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
11113 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
11116 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
11119 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
11127 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11128 SET_EXPR_LOCATION (ret
, loc
);
11129 TREE_NO_WARNING (ret
) = 1;
11135 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11136 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11137 of arguments in ARGS to be omitted. OLDNARGS is the number of
11138 elements in ARGS. */
11141 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11142 int skip
, tree fndecl
, int n
, va_list newargs
)
11144 int nargs
= oldnargs
- skip
+ n
;
11151 buffer
= XALLOCAVEC (tree
, nargs
);
11152 for (i
= 0; i
< n
; i
++)
11153 buffer
[i
] = va_arg (newargs
, tree
);
11154 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11155 buffer
[i
] = args
[j
];
11158 buffer
= args
+ skip
;
11160 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11163 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11164 list ARGS along with N new arguments specified as the "..."
11165 parameters. SKIP is the number of arguments in ARGS to be omitted.
11166 OLDNARGS is the number of elements in ARGS. */
11169 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11170 int skip
, tree fndecl
, int n
, ...)
11176 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11182 /* Return true if FNDECL shouldn't be folded right now.
11183 If a built-in function has an inline attribute always_inline
11184 wrapper, defer folding it after always_inline functions have
11185 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11186 might not be performed. */
11189 avoid_folding_inline_builtin (tree fndecl
)
11191 return (DECL_DECLARED_INLINE_P (fndecl
)
11192 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11194 && !cfun
->always_inline_functions_inlined
11195 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11198 /* A wrapper function for builtin folding that prevents warnings for
11199 "statement without effect" and the like, caused by removing the
11200 call node earlier than the warning is generated. */
11203 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11205 tree ret
= NULL_TREE
;
11206 tree fndecl
= get_callee_fndecl (exp
);
11208 && TREE_CODE (fndecl
) == FUNCTION_DECL
11209 && DECL_BUILT_IN (fndecl
)
11210 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11211 yet. Defer folding until we see all the arguments
11212 (after inlining). */
11213 && !CALL_EXPR_VA_ARG_PACK (exp
))
11215 int nargs
= call_expr_nargs (exp
);
11217 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11218 instead last argument is __builtin_va_arg_pack (). Defer folding
11219 even in that case, until arguments are finalized. */
11220 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11222 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11224 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11225 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11226 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11230 if (avoid_folding_inline_builtin (fndecl
))
11233 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11234 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11235 CALL_EXPR_ARGP (exp
), ignore
);
11238 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11240 tree
*args
= CALL_EXPR_ARGP (exp
);
11241 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11244 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11252 /* Conveniently construct a function call expression. FNDECL names the
11253 function to be called and N arguments are passed in the array
11257 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11259 tree fntype
= TREE_TYPE (fndecl
);
11260 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11262 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11265 /* Conveniently construct a function call expression. FNDECL names the
11266 function to be called and the arguments are passed in the vector
11270 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
11272 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
11273 vec_safe_address (vec
));
11277 /* Conveniently construct a function call expression. FNDECL names the
11278 function to be called, N is the number of arguments, and the "..."
11279 parameters are the argument expressions. */
11282 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11285 tree
*argarray
= XALLOCAVEC (tree
, n
);
11289 for (i
= 0; i
< n
; i
++)
11290 argarray
[i
] = va_arg (ap
, tree
);
11292 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11295 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11296 varargs macros aren't supported by all bootstrap compilers. */
11299 build_call_expr (tree fndecl
, int n
, ...)
11302 tree
*argarray
= XALLOCAVEC (tree
, n
);
11306 for (i
= 0; i
< n
; i
++)
11307 argarray
[i
] = va_arg (ap
, tree
);
11309 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11312 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11313 N arguments are passed in the array ARGARRAY. */
11316 fold_builtin_call_array (location_t loc
, tree type
,
11321 tree ret
= NULL_TREE
;
11324 if (TREE_CODE (fn
) == ADDR_EXPR
)
11326 tree fndecl
= TREE_OPERAND (fn
, 0);
11327 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11328 && DECL_BUILT_IN (fndecl
))
11330 /* If last argument is __builtin_va_arg_pack (), arguments to this
11331 function are not finalized yet. Defer folding until they are. */
11332 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11334 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11336 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11337 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11338 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11339 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11341 if (avoid_folding_inline_builtin (fndecl
))
11342 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11343 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11345 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11349 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11351 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11353 /* First try the transformations that don't require consing up
11355 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11360 /* If we got this far, we need to build an exp. */
11361 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11362 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11363 return ret
? ret
: exp
;
11367 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11370 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11371 along with N new arguments specified as the "..." parameters. SKIP
11372 is the number of arguments in EXP to be omitted. This function is used
11373 to do varargs-to-varargs transformations. */
11376 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11382 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11383 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11389 /* Validate a single argument ARG against a tree code CODE representing
11393 validate_arg (const_tree arg
, enum tree_code code
)
11397 else if (code
== POINTER_TYPE
)
11398 return POINTER_TYPE_P (TREE_TYPE (arg
));
11399 else if (code
== INTEGER_TYPE
)
11400 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11401 return code
== TREE_CODE (TREE_TYPE (arg
));
11404 /* This function validates the types of a function call argument list
11405 against a specified list of tree_codes. If the last specifier is a 0,
11406 that represents an ellipses, otherwise the last specifier must be a
11409 This is the GIMPLE version of validate_arglist. Eventually we want to
11410 completely convert builtins.c to work from GIMPLEs and the tree based
11411 validate_arglist will then be removed. */
11414 validate_gimple_arglist (const_gimple call
, ...)
11416 enum tree_code code
;
11422 va_start (ap
, call
);
11427 code
= (enum tree_code
) va_arg (ap
, int);
11431 /* This signifies an ellipses, any further arguments are all ok. */
11435 /* This signifies an endlink, if no arguments remain, return
11436 true, otherwise return false. */
11437 res
= (i
== gimple_call_num_args (call
));
11440 /* If no parameters remain or the parameter's code does not
11441 match the specified code, return false. Otherwise continue
11442 checking any remaining arguments. */
11443 arg
= gimple_call_arg (call
, i
++);
11444 if (!validate_arg (arg
, code
))
11451 /* We need gotos here since we can only have one VA_CLOSE in a
11459 /* Default target-specific builtin expander that does nothing. */
11462 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11463 rtx target ATTRIBUTE_UNUSED
,
11464 rtx subtarget ATTRIBUTE_UNUSED
,
11465 enum machine_mode mode ATTRIBUTE_UNUSED
,
11466 int ignore ATTRIBUTE_UNUSED
)
11471 /* Returns true is EXP represents data that would potentially reside
11472 in a readonly section. */
11475 readonly_data_expr (tree exp
)
11479 if (TREE_CODE (exp
) != ADDR_EXPR
)
11482 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11486 /* Make sure we call decl_readonly_section only for trees it
11487 can handle (since it returns true for everything it doesn't
11489 if (TREE_CODE (exp
) == STRING_CST
11490 || TREE_CODE (exp
) == CONSTRUCTOR
11491 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11492 return decl_readonly_section (exp
, 0);
11497 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11498 to the call, and TYPE is its return type.
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree.
11503 The simplified form may be a constant or other expression which
11504 computes the same value, but in a more efficient manner (including
11505 calls to other builtin functions).
11507 The call may contain arguments which need to be evaluated, but
11508 which are not useful to determine the result of the call. In
11509 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11510 COMPOUND_EXPR will be an argument which must be evaluated.
11511 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11512 COMPOUND_EXPR in the chain will contain the tree for the simplified
11513 form of the builtin function call. */
11516 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11518 if (!validate_arg (s1
, POINTER_TYPE
)
11519 || !validate_arg (s2
, POINTER_TYPE
))
11524 const char *p1
, *p2
;
11526 p2
= c_getstr (s2
);
11530 p1
= c_getstr (s1
);
11533 const char *r
= strstr (p1
, p2
);
11537 return build_int_cst (TREE_TYPE (s1
), 0);
11539 /* Return an offset into the constant string argument. */
11540 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11541 return fold_convert_loc (loc
, type
, tem
);
11544 /* The argument is const char *, and the result is char *, so we need
11545 a type conversion here to avoid a warning. */
11547 return fold_convert_loc (loc
, type
, s1
);
11552 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11556 /* New argument list transforming strstr(s1, s2) to
11557 strchr(s1, s2[0]). */
11558 return build_call_expr_loc (loc
, fn
, 2, s1
,
11559 build_int_cst (integer_type_node
, p2
[0]));
11563 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11564 the call, and TYPE is its return type.
11566 Return NULL_TREE if no simplification was possible, otherwise return the
11567 simplified form of the call as a tree.
11569 The simplified form may be a constant or other expression which
11570 computes the same value, but in a more efficient manner (including
11571 calls to other builtin functions).
11573 The call may contain arguments which need to be evaluated, but
11574 which are not useful to determine the result of the call. In
11575 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11576 COMPOUND_EXPR will be an argument which must be evaluated.
11577 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11578 COMPOUND_EXPR in the chain will contain the tree for the simplified
11579 form of the builtin function call. */
11582 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11584 if (!validate_arg (s1
, POINTER_TYPE
)
11585 || !validate_arg (s2
, INTEGER_TYPE
))
11591 if (TREE_CODE (s2
) != INTEGER_CST
)
11594 p1
= c_getstr (s1
);
11601 if (target_char_cast (s2
, &c
))
11604 r
= strchr (p1
, c
);
11607 return build_int_cst (TREE_TYPE (s1
), 0);
11609 /* Return an offset into the constant string argument. */
11610 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11611 return fold_convert_loc (loc
, type
, tem
);
11617 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11618 the call, and TYPE is its return type.
11620 Return NULL_TREE if no simplification was possible, otherwise return the
11621 simplified form of the call as a tree.
11623 The simplified form may be a constant or other expression which
11624 computes the same value, but in a more efficient manner (including
11625 calls to other builtin functions).
11627 The call may contain arguments which need to be evaluated, but
11628 which are not useful to determine the result of the call. In
11629 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11630 COMPOUND_EXPR will be an argument which must be evaluated.
11631 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11632 COMPOUND_EXPR in the chain will contain the tree for the simplified
11633 form of the builtin function call. */
11636 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11638 if (!validate_arg (s1
, POINTER_TYPE
)
11639 || !validate_arg (s2
, INTEGER_TYPE
))
11646 if (TREE_CODE (s2
) != INTEGER_CST
)
11649 p1
= c_getstr (s1
);
11656 if (target_char_cast (s2
, &c
))
11659 r
= strrchr (p1
, c
);
11662 return build_int_cst (TREE_TYPE (s1
), 0);
11664 /* Return an offset into the constant string argument. */
11665 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11666 return fold_convert_loc (loc
, type
, tem
);
11669 if (! integer_zerop (s2
))
11672 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11676 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11677 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11681 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11682 to the call, and TYPE is its return type.
11684 Return NULL_TREE if no simplification was possible, otherwise return the
11685 simplified form of the call as a tree.
11687 The simplified form may be a constant or other expression which
11688 computes the same value, but in a more efficient manner (including
11689 calls to other builtin functions).
11691 The call may contain arguments which need to be evaluated, but
11692 which are not useful to determine the result of the call. In
11693 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11694 COMPOUND_EXPR will be an argument which must be evaluated.
11695 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11696 COMPOUND_EXPR in the chain will contain the tree for the simplified
11697 form of the builtin function call. */
11700 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11702 if (!validate_arg (s1
, POINTER_TYPE
)
11703 || !validate_arg (s2
, POINTER_TYPE
))
11708 const char *p1
, *p2
;
11710 p2
= c_getstr (s2
);
11714 p1
= c_getstr (s1
);
11717 const char *r
= strpbrk (p1
, p2
);
11721 return build_int_cst (TREE_TYPE (s1
), 0);
11723 /* Return an offset into the constant string argument. */
11724 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11725 return fold_convert_loc (loc
, type
, tem
);
11729 /* strpbrk(x, "") == NULL.
11730 Evaluate and ignore s1 in case it had side-effects. */
11731 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11734 return NULL_TREE
; /* Really call strpbrk. */
11736 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11740 /* New argument list transforming strpbrk(s1, s2) to
11741 strchr(s1, s2[0]). */
11742 return build_call_expr_loc (loc
, fn
, 2, s1
,
11743 build_int_cst (integer_type_node
, p2
[0]));
11747 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11750 Return NULL_TREE if no simplification was possible, otherwise return the
11751 simplified form of the call as a tree.
11753 The simplified form may be a constant or other expression which
11754 computes the same value, but in a more efficient manner (including
11755 calls to other builtin functions).
11757 The call may contain arguments which need to be evaluated, but
11758 which are not useful to determine the result of the call. In
11759 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11760 COMPOUND_EXPR will be an argument which must be evaluated.
11761 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11762 COMPOUND_EXPR in the chain will contain the tree for the simplified
11763 form of the builtin function call. */
11766 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
,
11769 if (!validate_arg (dst
, POINTER_TYPE
)
11770 || !validate_arg (src
, POINTER_TYPE
))
11774 const char *p
= c_getstr (src
);
11776 /* If the string length is zero, return the dst parameter. */
11777 if (p
&& *p
== '\0')
11780 if (optimize_insn_for_speed_p ())
11782 /* See if we can store by pieces into (dst + strlen(dst)). */
11784 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11785 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
11787 if (!strlen_fn
|| !memcpy_fn
)
11790 /* If the length of the source string isn't computable don't
11791 split strcat into strlen and memcpy. */
11793 len
= c_strlen (src
, 1);
11794 if (! len
|| TREE_SIDE_EFFECTS (len
))
11797 /* Stabilize the argument list. */
11798 dst
= builtin_save_expr (dst
);
11800 /* Create strlen (dst). */
11801 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11802 /* Create (dst p+ strlen (dst)). */
11804 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11805 newdst
= builtin_save_expr (newdst
);
11807 len
= fold_convert_loc (loc
, size_type_node
, len
);
11808 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
11809 build_int_cst (size_type_node
, 1));
11811 call
= build_call_expr_loc (loc
, memcpy_fn
, 3, newdst
, src
, len
);
11812 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11818 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11819 arguments to the call.
11821 Return NULL_TREE if no simplification was possible, otherwise return the
11822 simplified form of the call as a tree.
11824 The simplified form may be a constant or other expression which
11825 computes the same value, but in a more efficient manner (including
11826 calls to other builtin functions).
11828 The call may contain arguments which need to be evaluated, but
11829 which are not useful to determine the result of the call. In
11830 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11831 COMPOUND_EXPR will be an argument which must be evaluated.
11832 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11833 COMPOUND_EXPR in the chain will contain the tree for the simplified
11834 form of the builtin function call. */
11837 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11839 if (!validate_arg (dst
, POINTER_TYPE
)
11840 || !validate_arg (src
, POINTER_TYPE
)
11841 || !validate_arg (len
, INTEGER_TYPE
))
11845 const char *p
= c_getstr (src
);
11847 /* If the requested length is zero, or the src parameter string
11848 length is zero, return the dst parameter. */
11849 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11850 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11852 /* If the requested len is greater than or equal to the string
11853 length, call strcat. */
11854 if (TREE_CODE (len
) == INTEGER_CST
&& p
11855 && compare_tree_int (len
, strlen (p
)) >= 0)
11857 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11859 /* If the replacement _DECL isn't initialized, don't do the
11864 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11870 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11873 Return NULL_TREE if no simplification was possible, otherwise return the
11874 simplified form of the call as a tree.
11876 The simplified form may be a constant or other expression which
11877 computes the same value, but in a more efficient manner (including
11878 calls to other builtin functions).
11880 The call may contain arguments which need to be evaluated, but
11881 which are not useful to determine the result of the call. In
11882 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11883 COMPOUND_EXPR will be an argument which must be evaluated.
11884 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11885 COMPOUND_EXPR in the chain will contain the tree for the simplified
11886 form of the builtin function call. */
11889 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11891 if (!validate_arg (s1
, POINTER_TYPE
)
11892 || !validate_arg (s2
, POINTER_TYPE
))
11896 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11898 /* If both arguments are constants, evaluate at compile-time. */
11901 const size_t r
= strspn (p1
, p2
);
11902 return build_int_cst (size_type_node
, r
);
11905 /* If either argument is "", return NULL_TREE. */
11906 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11907 /* Evaluate and ignore both arguments in case either one has
11909 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11915 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11918 Return NULL_TREE if no simplification was possible, otherwise return the
11919 simplified form of the call as a tree.
11921 The simplified form may be a constant or other expression which
11922 computes the same value, but in a more efficient manner (including
11923 calls to other builtin functions).
11925 The call may contain arguments which need to be evaluated, but
11926 which are not useful to determine the result of the call. In
11927 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11928 COMPOUND_EXPR will be an argument which must be evaluated.
11929 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11930 COMPOUND_EXPR in the chain will contain the tree for the simplified
11931 form of the builtin function call. */
11934 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11936 if (!validate_arg (s1
, POINTER_TYPE
)
11937 || !validate_arg (s2
, POINTER_TYPE
))
11941 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11943 /* If both arguments are constants, evaluate at compile-time. */
11946 const size_t r
= strcspn (p1
, p2
);
11947 return build_int_cst (size_type_node
, r
);
11950 /* If the first argument is "", return NULL_TREE. */
11951 if (p1
&& *p1
== '\0')
11953 /* Evaluate and ignore argument s2 in case it has
11955 return omit_one_operand_loc (loc
, size_type_node
,
11956 size_zero_node
, s2
);
11959 /* If the second argument is "", return __builtin_strlen(s1). */
11960 if (p2
&& *p2
== '\0')
11962 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11964 /* If the replacement _DECL isn't initialized, don't do the
11969 return build_call_expr_loc (loc
, fn
, 1, s1
);
11975 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11976 to the call. IGNORE is true if the value returned
11977 by the builtin will be ignored. UNLOCKED is true is true if this
11978 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11979 the known length of the string. Return NULL_TREE if no simplification
11983 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11984 bool ignore
, bool unlocked
, tree len
)
11986 /* If we're using an unlocked function, assume the other unlocked
11987 functions exist explicitly. */
11988 tree
const fn_fputc
= (unlocked
11989 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
11990 : builtin_decl_implicit (BUILT_IN_FPUTC
));
11991 tree
const fn_fwrite
= (unlocked
11992 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
11993 : builtin_decl_implicit (BUILT_IN_FWRITE
));
11995 /* If the return value is used, don't do the transformation. */
11999 /* Verify the arguments in the original call. */
12000 if (!validate_arg (arg0
, POINTER_TYPE
)
12001 || !validate_arg (arg1
, POINTER_TYPE
))
12005 len
= c_strlen (arg0
, 0);
12007 /* Get the length of the string passed to fputs. If the length
12008 can't be determined, punt. */
12010 || TREE_CODE (len
) != INTEGER_CST
)
12013 switch (compare_tree_int (len
, 1))
12015 case -1: /* length is 0, delete the call entirely . */
12016 return omit_one_operand_loc (loc
, integer_type_node
,
12017 integer_zero_node
, arg1
);;
12019 case 0: /* length is 1, call fputc. */
12021 const char *p
= c_getstr (arg0
);
12026 return build_call_expr_loc (loc
, fn_fputc
, 2,
12028 (integer_type_node
, p
[0]), arg1
);
12034 case 1: /* length is greater than 1, call fwrite. */
12036 /* If optimizing for size keep fputs. */
12037 if (optimize_function_for_size_p (cfun
))
12039 /* New argument list transforming fputs(string, stream) to
12040 fwrite(string, 1, len, stream). */
12042 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
12043 size_one_node
, len
, arg1
);
12048 gcc_unreachable ();
12053 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12054 produced. False otherwise. This is done so that we don't output the error
12055 or warning twice or three times. */
12058 fold_builtin_next_arg (tree exp
, bool va_start_p
)
12060 tree fntype
= TREE_TYPE (current_function_decl
);
12061 int nargs
= call_expr_nargs (exp
);
12063 /* There is good chance the current input_location points inside the
12064 definition of the va_start macro (perhaps on the token for
12065 builtin) in a system header, so warnings will not be emitted.
12066 Use the location in real source code. */
12067 source_location current_location
=
12068 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
12071 if (!stdarg_p (fntype
))
12073 error ("%<va_start%> used in function with fixed args");
12079 if (va_start_p
&& (nargs
!= 2))
12081 error ("wrong number of arguments to function %<va_start%>");
12084 arg
= CALL_EXPR_ARG (exp
, 1);
12086 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12087 when we checked the arguments and if needed issued a warning. */
12092 /* Evidently an out of date version of <stdarg.h>; can't validate
12093 va_start's second argument, but can still work as intended. */
12094 warning_at (current_location
,
12096 "%<__builtin_next_arg%> called without an argument");
12099 else if (nargs
> 1)
12101 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12104 arg
= CALL_EXPR_ARG (exp
, 0);
12107 if (TREE_CODE (arg
) == SSA_NAME
)
12108 arg
= SSA_NAME_VAR (arg
);
12110 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12111 or __builtin_next_arg (0) the first time we see it, after checking
12112 the arguments and if needed issuing a warning. */
12113 if (!integer_zerop (arg
))
12115 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12117 /* Strip off all nops for the sake of the comparison. This
12118 is not quite the same as STRIP_NOPS. It does more.
12119 We must also strip off INDIRECT_EXPR for C++ reference
12121 while (CONVERT_EXPR_P (arg
)
12122 || TREE_CODE (arg
) == INDIRECT_REF
)
12123 arg
= TREE_OPERAND (arg
, 0);
12124 if (arg
!= last_parm
)
12126 /* FIXME: Sometimes with the tree optimizers we can get the
12127 not the last argument even though the user used the last
12128 argument. We just warn and set the arg to be the last
12129 argument so that we will get wrong-code because of
12131 warning_at (current_location
,
12133 "second parameter of %<va_start%> not last named argument");
12136 /* Undefined by C99 7.15.1.4p4 (va_start):
12137 "If the parameter parmN is declared with the register storage
12138 class, with a function or array type, or with a type that is
12139 not compatible with the type that results after application of
12140 the default argument promotions, the behavior is undefined."
12142 else if (DECL_REGISTER (arg
))
12144 warning_at (current_location
,
12146 "undefined behaviour when second parameter of "
12147 "%<va_start%> is declared with %<register%> storage");
12150 /* We want to verify the second parameter just once before the tree
12151 optimizers are run and then avoid keeping it in the tree,
12152 as otherwise we could warn even for correct code like:
12153 void foo (int i, ...)
12154 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12156 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12158 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12164 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12165 ORIG may be null if this is a 2-argument call. We don't attempt to
12166 simplify calls with more than 3 arguments.
12168 Return NULL_TREE if no simplification was possible, otherwise return the
12169 simplified form of the call as a tree. If IGNORED is true, it means that
12170 the caller does not use the returned value of the function. */
12173 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12174 tree orig
, int ignored
)
12177 const char *fmt_str
= NULL
;
12179 /* Verify the required arguments in the original call. We deal with two
12180 types of sprintf() calls: 'sprintf (str, fmt)' and
12181 'sprintf (dest, "%s", orig)'. */
12182 if (!validate_arg (dest
, POINTER_TYPE
)
12183 || !validate_arg (fmt
, POINTER_TYPE
))
12185 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12188 /* Check whether the format is a literal string constant. */
12189 fmt_str
= c_getstr (fmt
);
12190 if (fmt_str
== NULL
)
12194 retval
= NULL_TREE
;
12196 if (!init_target_chars ())
12199 /* If the format doesn't contain % args or %%, use strcpy. */
12200 if (strchr (fmt_str
, target_percent
) == NULL
)
12202 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12207 /* Don't optimize sprintf (buf, "abc", ptr++). */
12211 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12212 'format' is known to contain no % formats. */
12213 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12215 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12218 /* If the format is "%s", use strcpy if the result isn't used. */
12219 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12222 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12227 /* Don't crash on sprintf (str1, "%s"). */
12231 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12234 retval
= c_strlen (orig
, 1);
12235 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12238 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12241 if (call
&& retval
)
12243 retval
= fold_convert_loc
12244 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12246 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12252 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12253 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12254 attempt to simplify calls with more than 4 arguments.
12256 Return NULL_TREE if no simplification was possible, otherwise return the
12257 simplified form of the call as a tree. If IGNORED is true, it means that
12258 the caller does not use the returned value of the function. */
12261 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12262 tree orig
, int ignored
)
12265 const char *fmt_str
= NULL
;
12266 unsigned HOST_WIDE_INT destlen
;
12268 /* Verify the required arguments in the original call. We deal with two
12269 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12270 'snprintf (dest, cst, "%s", orig)'. */
12271 if (!validate_arg (dest
, POINTER_TYPE
)
12272 || !validate_arg (destsize
, INTEGER_TYPE
)
12273 || !validate_arg (fmt
, POINTER_TYPE
))
12275 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12278 if (!tree_fits_uhwi_p (destsize
))
12281 /* Check whether the format is a literal string constant. */
12282 fmt_str
= c_getstr (fmt
);
12283 if (fmt_str
== NULL
)
12287 retval
= NULL_TREE
;
12289 if (!init_target_chars ())
12292 destlen
= tree_to_uhwi (destsize
);
12294 /* If the format doesn't contain % args or %%, use strcpy. */
12295 if (strchr (fmt_str
, target_percent
) == NULL
)
12297 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12298 size_t len
= strlen (fmt_str
);
12300 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12304 /* We could expand this as
12305 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12307 memcpy (str, fmt_with_nul_at_cstm1, cst);
12308 but in the former case that might increase code size
12309 and in the latter case grow .rodata section too much.
12310 So punt for now. */
12311 if (len
>= destlen
)
12317 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12318 'format' is known to contain no % formats and
12319 strlen (fmt) < cst. */
12320 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12323 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12326 /* If the format is "%s", use strcpy if the result isn't used. */
12327 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12329 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12330 unsigned HOST_WIDE_INT origlen
;
12332 /* Don't crash on snprintf (str1, cst, "%s"). */
12336 retval
= c_strlen (orig
, 1);
12337 if (!retval
|| !tree_fits_uhwi_p (retval
))
12340 origlen
= tree_to_uhwi (retval
);
12341 /* We could expand this as
12342 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12344 memcpy (str1, str2_with_nul_at_cstm1, cst);
12345 but in the former case that might increase code size
12346 and in the latter case grow .rodata section too much.
12347 So punt for now. */
12348 if (origlen
>= destlen
)
12351 /* Convert snprintf (str1, cst, "%s", str2) into
12352 strcpy (str1, str2) if strlen (str2) < cst. */
12356 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12359 retval
= NULL_TREE
;
12362 if (call
&& retval
)
12364 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12365 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12366 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12372 /* Expand a call EXP to __builtin_object_size. */
12375 expand_builtin_object_size (tree exp
)
12378 int object_size_type
;
12379 tree fndecl
= get_callee_fndecl (exp
);
12381 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12383 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12385 expand_builtin_trap ();
12389 ost
= CALL_EXPR_ARG (exp
, 1);
12392 if (TREE_CODE (ost
) != INTEGER_CST
12393 || tree_int_cst_sgn (ost
) < 0
12394 || compare_tree_int (ost
, 3) > 0)
12396 error ("%Klast argument of %D is not integer constant between 0 and 3",
12398 expand_builtin_trap ();
12402 object_size_type
= tree_to_shwi (ost
);
12404 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12407 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12408 FCODE is the BUILT_IN_* to use.
12409 Return NULL_RTX if we failed; the caller should emit a normal call,
12410 otherwise try to get the result in TARGET, if convenient (and in
12411 mode MODE if that's convenient). */
12414 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12415 enum built_in_function fcode
)
12417 tree dest
, src
, len
, size
;
12419 if (!validate_arglist (exp
,
12421 fcode
== BUILT_IN_MEMSET_CHK
12422 ? INTEGER_TYPE
: POINTER_TYPE
,
12423 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12426 dest
= CALL_EXPR_ARG (exp
, 0);
12427 src
= CALL_EXPR_ARG (exp
, 1);
12428 len
= CALL_EXPR_ARG (exp
, 2);
12429 size
= CALL_EXPR_ARG (exp
, 3);
12431 if (! tree_fits_uhwi_p (size
))
12434 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
12438 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12440 warning_at (tree_nonartificial_location (exp
),
12441 0, "%Kcall to %D will always overflow destination buffer",
12442 exp
, get_callee_fndecl (exp
));
12447 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12448 mem{cpy,pcpy,move,set} is available. */
12451 case BUILT_IN_MEMCPY_CHK
:
12452 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12454 case BUILT_IN_MEMPCPY_CHK
:
12455 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12457 case BUILT_IN_MEMMOVE_CHK
:
12458 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12460 case BUILT_IN_MEMSET_CHK
:
12461 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12470 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12471 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12472 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12473 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12475 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12479 unsigned int dest_align
= get_pointer_alignment (dest
);
12481 /* If DEST is not a pointer type, call the normal function. */
12482 if (dest_align
== 0)
12485 /* If SRC and DEST are the same (and not volatile), do nothing. */
12486 if (operand_equal_p (src
, dest
, 0))
12490 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12492 /* Evaluate and ignore LEN in case it has side-effects. */
12493 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12494 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12497 expr
= fold_build_pointer_plus (dest
, len
);
12498 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12501 /* __memmove_chk special case. */
12502 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12504 unsigned int src_align
= get_pointer_alignment (src
);
12506 if (src_align
== 0)
12509 /* If src is categorized for a readonly section we can use
12510 normal __memcpy_chk. */
12511 if (readonly_data_expr (src
))
12513 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12516 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12517 dest
, src
, len
, size
);
12518 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12519 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12520 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12527 /* Emit warning if a buffer overflow is detected at compile time. */
12530 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12534 location_t loc
= tree_nonartificial_location (exp
);
12538 case BUILT_IN_STRCPY_CHK
:
12539 case BUILT_IN_STPCPY_CHK
:
12540 /* For __strcat_chk the warning will be emitted only if overflowing
12541 by at least strlen (dest) + 1 bytes. */
12542 case BUILT_IN_STRCAT_CHK
:
12543 len
= CALL_EXPR_ARG (exp
, 1);
12544 size
= CALL_EXPR_ARG (exp
, 2);
12547 case BUILT_IN_STRNCAT_CHK
:
12548 case BUILT_IN_STRNCPY_CHK
:
12549 case BUILT_IN_STPNCPY_CHK
:
12550 len
= CALL_EXPR_ARG (exp
, 2);
12551 size
= CALL_EXPR_ARG (exp
, 3);
12553 case BUILT_IN_SNPRINTF_CHK
:
12554 case BUILT_IN_VSNPRINTF_CHK
:
12555 len
= CALL_EXPR_ARG (exp
, 1);
12556 size
= CALL_EXPR_ARG (exp
, 3);
12559 gcc_unreachable ();
12565 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
12570 len
= c_strlen (len
, 1);
12571 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
12574 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12576 tree src
= CALL_EXPR_ARG (exp
, 1);
12577 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
12579 src
= c_strlen (src
, 1);
12580 if (! src
|| ! tree_fits_uhwi_p (src
))
12582 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12583 exp
, get_callee_fndecl (exp
));
12586 else if (tree_int_cst_lt (src
, size
))
12589 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
12592 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12593 exp
, get_callee_fndecl (exp
));
12596 /* Emit warning if a buffer overflow is detected at compile time
12597 in __sprintf_chk/__vsprintf_chk calls. */
12600 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12602 tree size
, len
, fmt
;
12603 const char *fmt_str
;
12604 int nargs
= call_expr_nargs (exp
);
12606 /* Verify the required arguments in the original call. */
12610 size
= CALL_EXPR_ARG (exp
, 2);
12611 fmt
= CALL_EXPR_ARG (exp
, 3);
12613 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
12616 /* Check whether the format is a literal string constant. */
12617 fmt_str
= c_getstr (fmt
);
12618 if (fmt_str
== NULL
)
12621 if (!init_target_chars ())
12624 /* If the format doesn't contain % args or %%, we know its size. */
12625 if (strchr (fmt_str
, target_percent
) == 0)
12626 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12627 /* If the format is "%s" and first ... argument is a string literal,
12629 else if (fcode
== BUILT_IN_SPRINTF_CHK
12630 && strcmp (fmt_str
, target_percent_s
) == 0)
12636 arg
= CALL_EXPR_ARG (exp
, 4);
12637 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12640 len
= c_strlen (arg
, 1);
12641 if (!len
|| ! tree_fits_uhwi_p (len
))
12647 if (! tree_int_cst_lt (len
, size
))
12648 warning_at (tree_nonartificial_location (exp
),
12649 0, "%Kcall to %D will always overflow destination buffer",
12650 exp
, get_callee_fndecl (exp
));
12653 /* Emit warning if a free is called with address of a variable. */
12656 maybe_emit_free_warning (tree exp
)
12658 tree arg
= CALL_EXPR_ARG (exp
, 0);
12661 if (TREE_CODE (arg
) != ADDR_EXPR
)
12664 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12665 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12668 if (SSA_VAR_P (arg
))
12669 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12670 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12672 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12673 "%Kattempt to free a non-heap object", exp
);
12676 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12680 fold_builtin_object_size (tree ptr
, tree ost
)
12682 unsigned HOST_WIDE_INT bytes
;
12683 int object_size_type
;
12685 if (!validate_arg (ptr
, POINTER_TYPE
)
12686 || !validate_arg (ost
, INTEGER_TYPE
))
12691 if (TREE_CODE (ost
) != INTEGER_CST
12692 || tree_int_cst_sgn (ost
) < 0
12693 || compare_tree_int (ost
, 3) > 0)
12696 object_size_type
= tree_to_shwi (ost
);
12698 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12699 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12700 and (size_t) 0 for types 2 and 3. */
12701 if (TREE_SIDE_EFFECTS (ptr
))
12702 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12704 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12706 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12707 if (double_int_fits_to_tree_p (size_type_node
,
12708 double_int::from_uhwi (bytes
)))
12709 return build_int_cstu (size_type_node
, bytes
);
12711 else if (TREE_CODE (ptr
) == SSA_NAME
)
12713 /* If object size is not known yet, delay folding until
12714 later. Maybe subsequent passes will help determining
12716 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12717 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12718 && double_int_fits_to_tree_p (size_type_node
,
12719 double_int::from_uhwi (bytes
)))
12720 return build_int_cstu (size_type_node
, bytes
);
12726 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12727 DEST, SRC, LEN, and SIZE are the arguments to the call.
12728 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12729 code of the builtin. If MAXLEN is not NULL, it is maximum length
12730 passed as third argument. */
12733 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12734 tree dest
, tree src
, tree len
, tree size
,
12735 tree maxlen
, bool ignore
,
12736 enum built_in_function fcode
)
12740 if (!validate_arg (dest
, POINTER_TYPE
)
12741 || !validate_arg (src
,
12742 (fcode
== BUILT_IN_MEMSET_CHK
12743 ? INTEGER_TYPE
: POINTER_TYPE
))
12744 || !validate_arg (len
, INTEGER_TYPE
)
12745 || !validate_arg (size
, INTEGER_TYPE
))
12748 /* If SRC and DEST are the same (and not volatile), return DEST
12749 (resp. DEST+LEN for __mempcpy_chk). */
12750 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12752 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12753 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12757 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12758 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12762 if (! tree_fits_uhwi_p (size
))
12765 if (! integer_all_onesp (size
))
12767 if (! tree_fits_uhwi_p (len
))
12769 /* If LEN is not constant, try MAXLEN too.
12770 For MAXLEN only allow optimizing into non-_ocs function
12771 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12772 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12774 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12776 /* (void) __mempcpy_chk () can be optimized into
12777 (void) __memcpy_chk (). */
12778 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12782 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12790 if (tree_int_cst_lt (size
, maxlen
))
12795 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12796 mem{cpy,pcpy,move,set} is available. */
12799 case BUILT_IN_MEMCPY_CHK
:
12800 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12802 case BUILT_IN_MEMPCPY_CHK
:
12803 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12805 case BUILT_IN_MEMMOVE_CHK
:
12806 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12808 case BUILT_IN_MEMSET_CHK
:
12809 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12818 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12821 /* Fold a call to the __st[rp]cpy_chk builtin.
12822 DEST, SRC, and SIZE are the arguments to the call.
12823 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12824 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12825 strings passed as second argument. */
12828 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12829 tree src
, tree size
,
12830 tree maxlen
, bool ignore
,
12831 enum built_in_function fcode
)
12835 if (!validate_arg (dest
, POINTER_TYPE
)
12836 || !validate_arg (src
, POINTER_TYPE
)
12837 || !validate_arg (size
, INTEGER_TYPE
))
12840 /* If SRC and DEST are the same (and not volatile), return DEST. */
12841 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12842 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12844 if (! tree_fits_uhwi_p (size
))
12847 if (! integer_all_onesp (size
))
12849 len
= c_strlen (src
, 1);
12850 if (! len
|| ! tree_fits_uhwi_p (len
))
12852 /* If LEN is not constant, try MAXLEN too.
12853 For MAXLEN only allow optimizing into non-_ocs function
12854 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12855 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12857 if (fcode
== BUILT_IN_STPCPY_CHK
)
12862 /* If return value of __stpcpy_chk is ignored,
12863 optimize into __strcpy_chk. */
12864 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12868 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12871 if (! len
|| TREE_SIDE_EFFECTS (len
))
12874 /* If c_strlen returned something, but not a constant,
12875 transform __strcpy_chk into __memcpy_chk. */
12876 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12880 len
= fold_convert_loc (loc
, size_type_node
, len
);
12881 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12882 build_int_cst (size_type_node
, 1));
12883 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12884 build_call_expr_loc (loc
, fn
, 4,
12885 dest
, src
, len
, size
));
12891 if (! tree_int_cst_lt (maxlen
, size
))
12895 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12896 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12897 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12901 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12904 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12905 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12906 length passed as third argument. IGNORE is true if return value can be
12907 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12910 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12911 tree len
, tree size
, tree maxlen
, bool ignore
,
12912 enum built_in_function fcode
)
12916 if (!validate_arg (dest
, POINTER_TYPE
)
12917 || !validate_arg (src
, POINTER_TYPE
)
12918 || !validate_arg (len
, INTEGER_TYPE
)
12919 || !validate_arg (size
, INTEGER_TYPE
))
12922 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
12924 /* If return value of __stpncpy_chk is ignored,
12925 optimize into __strncpy_chk. */
12926 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
12928 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12931 if (! tree_fits_uhwi_p (size
))
12934 if (! integer_all_onesp (size
))
12936 if (! tree_fits_uhwi_p (len
))
12938 /* If LEN is not constant, try MAXLEN too.
12939 For MAXLEN only allow optimizing into non-_ocs function
12940 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12941 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
12947 if (tree_int_cst_lt (size
, maxlen
))
12951 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12952 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
12953 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
12957 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12960 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12961 are the arguments to the call. */
12964 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12965 tree src
, tree size
)
12970 if (!validate_arg (dest
, POINTER_TYPE
)
12971 || !validate_arg (src
, POINTER_TYPE
)
12972 || !validate_arg (size
, INTEGER_TYPE
))
12975 p
= c_getstr (src
);
12976 /* If the SRC parameter is "", return DEST. */
12977 if (p
&& *p
== '\0')
12978 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12980 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
12983 /* If __builtin_strcat_chk is used, assume strcat is available. */
12984 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
12988 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12991 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12995 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12996 tree dest
, tree src
, tree len
, tree size
)
13001 if (!validate_arg (dest
, POINTER_TYPE
)
13002 || !validate_arg (src
, POINTER_TYPE
)
13003 || !validate_arg (size
, INTEGER_TYPE
)
13004 || !validate_arg (size
, INTEGER_TYPE
))
13007 p
= c_getstr (src
);
13008 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13009 if (p
&& *p
== '\0')
13010 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
13011 else if (integer_zerop (len
))
13012 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
13014 if (! tree_fits_uhwi_p (size
))
13017 if (! integer_all_onesp (size
))
13019 tree src_len
= c_strlen (src
, 1);
13021 && tree_fits_uhwi_p (src_len
)
13022 && tree_fits_uhwi_p (len
)
13023 && ! tree_int_cst_lt (len
, src_len
))
13025 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13026 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
13030 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
13035 /* If __builtin_strncat_chk is used, assume strncat is available. */
13036 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
13040 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
13043 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13044 Return NULL_TREE if a normal call should be emitted rather than
13045 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13046 or BUILT_IN_VSPRINTF_CHK. */
13049 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13050 enum built_in_function fcode
)
13052 tree dest
, size
, len
, fn
, fmt
, flag
;
13053 const char *fmt_str
;
13055 /* Verify the required arguments in the original call. */
13059 if (!validate_arg (dest
, POINTER_TYPE
))
13062 if (!validate_arg (flag
, INTEGER_TYPE
))
13065 if (!validate_arg (size
, INTEGER_TYPE
))
13068 if (!validate_arg (fmt
, POINTER_TYPE
))
13071 if (! tree_fits_uhwi_p (size
))
13076 if (!init_target_chars ())
13079 /* Check whether the format is a literal string constant. */
13080 fmt_str
= c_getstr (fmt
);
13081 if (fmt_str
!= NULL
)
13083 /* If the format doesn't contain % args or %%, we know the size. */
13084 if (strchr (fmt_str
, target_percent
) == 0)
13086 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13087 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13089 /* If the format is "%s" and first ... argument is a string literal,
13090 we know the size too. */
13091 else if (fcode
== BUILT_IN_SPRINTF_CHK
13092 && strcmp (fmt_str
, target_percent_s
) == 0)
13099 if (validate_arg (arg
, POINTER_TYPE
))
13101 len
= c_strlen (arg
, 1);
13102 if (! len
|| ! tree_fits_uhwi_p (len
))
13109 if (! integer_all_onesp (size
))
13111 if (! len
|| ! tree_int_cst_lt (len
, size
))
13115 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13116 or if format doesn't contain % chars or is "%s". */
13117 if (! integer_zerop (flag
))
13119 if (fmt_str
== NULL
)
13121 if (strchr (fmt_str
, target_percent
) != NULL
13122 && strcmp (fmt_str
, target_percent_s
))
13126 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13127 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13128 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13132 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13135 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13136 a normal call should be emitted rather than expanding the function
13137 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13140 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13141 enum built_in_function fcode
)
13143 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13144 CALL_EXPR_ARGP (exp
), fcode
);
13147 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13148 NULL_TREE if a normal call should be emitted rather than expanding
13149 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13150 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13151 passed as second argument. */
13154 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13155 tree maxlen
, enum built_in_function fcode
)
13157 tree dest
, size
, len
, fn
, fmt
, flag
;
13158 const char *fmt_str
;
13160 /* Verify the required arguments in the original call. */
13164 if (!validate_arg (dest
, POINTER_TYPE
))
13167 if (!validate_arg (len
, INTEGER_TYPE
))
13170 if (!validate_arg (flag
, INTEGER_TYPE
))
13173 if (!validate_arg (size
, INTEGER_TYPE
))
13176 if (!validate_arg (fmt
, POINTER_TYPE
))
13179 if (! tree_fits_uhwi_p (size
))
13182 if (! integer_all_onesp (size
))
13184 if (! tree_fits_uhwi_p (len
))
13186 /* If LEN is not constant, try MAXLEN too.
13187 For MAXLEN only allow optimizing into non-_ocs function
13188 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13189 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
13195 if (tree_int_cst_lt (size
, maxlen
))
13199 if (!init_target_chars ())
13202 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13203 or if format doesn't contain % chars or is "%s". */
13204 if (! integer_zerop (flag
))
13206 fmt_str
= c_getstr (fmt
);
13207 if (fmt_str
== NULL
)
13209 if (strchr (fmt_str
, target_percent
) != NULL
13210 && strcmp (fmt_str
, target_percent_s
))
13214 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13216 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13217 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13221 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13224 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13225 a normal call should be emitted rather than expanding the function
13226 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13227 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13228 passed as second argument. */
13231 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13232 enum built_in_function fcode
)
13234 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13235 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13238 /* Builtins with folding operations that operate on "..." arguments
13239 need special handling; we need to store the arguments in a convenient
13240 data structure before attempting any folding. Fortunately there are
13241 only a few builtins that fall into this category. FNDECL is the
13242 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13243 result of the function call is ignored. */
13246 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
13247 bool ignore ATTRIBUTE_UNUSED
)
13249 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
13250 tree ret
= NULL_TREE
;
13254 case BUILT_IN_SPRINTF_CHK
:
13255 case BUILT_IN_VSPRINTF_CHK
:
13256 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
13259 case BUILT_IN_SNPRINTF_CHK
:
13260 case BUILT_IN_VSNPRINTF_CHK
:
13261 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
13264 case BUILT_IN_FPCLASSIFY
:
13265 ret
= fold_builtin_fpclassify (loc
, exp
);
13273 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
13274 SET_EXPR_LOCATION (ret
, loc
);
13275 TREE_NO_WARNING (ret
) = 1;
13281 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13282 FMT and ARG are the arguments to the call; we don't fold cases with
13283 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13285 Return NULL_TREE if no simplification was possible, otherwise return the
13286 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13287 code of the function to be simplified. */
13290 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13291 tree arg
, bool ignore
,
13292 enum built_in_function fcode
)
13294 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13295 const char *fmt_str
= NULL
;
13297 /* If the return value is used, don't do the transformation. */
13301 /* Verify the required arguments in the original call. */
13302 if (!validate_arg (fmt
, POINTER_TYPE
))
13305 /* Check whether the format is a literal string constant. */
13306 fmt_str
= c_getstr (fmt
);
13307 if (fmt_str
== NULL
)
13310 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13312 /* If we're using an unlocked function, assume the other
13313 unlocked functions exist explicitly. */
13314 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13315 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13319 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13320 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13323 if (!init_target_chars ())
13326 if (strcmp (fmt_str
, target_percent_s
) == 0
13327 || strchr (fmt_str
, target_percent
) == NULL
)
13331 if (strcmp (fmt_str
, target_percent_s
) == 0)
13333 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13336 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13339 str
= c_getstr (arg
);
13345 /* The format specifier doesn't contain any '%' characters. */
13346 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13352 /* If the string was "", printf does nothing. */
13353 if (str
[0] == '\0')
13354 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13356 /* If the string has length of 1, call putchar. */
13357 if (str
[1] == '\0')
13359 /* Given printf("c"), (where c is any one character,)
13360 convert "c"[0] to an int and pass that to the replacement
13362 newarg
= build_int_cst (integer_type_node
, str
[0]);
13364 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13368 /* If the string was "string\n", call puts("string"). */
13369 size_t len
= strlen (str
);
13370 if ((unsigned char)str
[len
- 1] == target_newline
13371 && (size_t) (int) len
== len
13375 tree offset_node
, string_cst
;
13377 /* Create a NUL-terminated string that's one char shorter
13378 than the original, stripping off the trailing '\n'. */
13379 newarg
= build_string_literal (len
, str
);
13380 string_cst
= string_constant (newarg
, &offset_node
);
13381 gcc_checking_assert (string_cst
13382 && (TREE_STRING_LENGTH (string_cst
)
13384 && integer_zerop (offset_node
)
13386 TREE_STRING_POINTER (string_cst
)[len
- 1]
13387 == target_newline
);
13388 /* build_string_literal creates a new STRING_CST,
13389 modify it in place to avoid double copying. */
13390 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13391 newstr
[len
- 1] = '\0';
13393 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13396 /* We'd like to arrange to call fputs(string,stdout) here,
13397 but we need stdout and don't have a way to get it yet. */
13402 /* The other optimizations can be done only on the non-va_list variants. */
13403 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13406 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13407 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13409 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13412 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13415 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13416 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13418 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13421 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13427 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13430 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13431 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13432 more than 3 arguments, and ARG may be null in the 2-argument case.
13434 Return NULL_TREE if no simplification was possible, otherwise return the
13435 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13436 code of the function to be simplified. */
13439 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13440 tree fmt
, tree arg
, bool ignore
,
13441 enum built_in_function fcode
)
13443 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13444 const char *fmt_str
= NULL
;
13446 /* If the return value is used, don't do the transformation. */
13450 /* Verify the required arguments in the original call. */
13451 if (!validate_arg (fp
, POINTER_TYPE
))
13453 if (!validate_arg (fmt
, POINTER_TYPE
))
13456 /* Check whether the format is a literal string constant. */
13457 fmt_str
= c_getstr (fmt
);
13458 if (fmt_str
== NULL
)
13461 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13463 /* If we're using an unlocked function, assume the other
13464 unlocked functions exist explicitly. */
13465 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13466 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13470 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13471 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13474 if (!init_target_chars ())
13477 /* If the format doesn't contain % args or %%, use strcpy. */
13478 if (strchr (fmt_str
, target_percent
) == NULL
)
13480 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13484 /* If the format specifier was "", fprintf does nothing. */
13485 if (fmt_str
[0] == '\0')
13487 /* If FP has side-effects, just wait until gimplification is
13489 if (TREE_SIDE_EFFECTS (fp
))
13492 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13495 /* When "string" doesn't contain %, replace all cases of
13496 fprintf (fp, string) with fputs (string, fp). The fputs
13497 builtin will take care of special cases like length == 1. */
13499 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13502 /* The other optimizations can be done only on the non-va_list variants. */
13503 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13506 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13507 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13509 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13512 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13515 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13516 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13518 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13521 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13526 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13529 /* Initialize format string characters in the target charset. */
13532 init_target_chars (void)
13537 target_newline
= lang_hooks
.to_target_charset ('\n');
13538 target_percent
= lang_hooks
.to_target_charset ('%');
13539 target_c
= lang_hooks
.to_target_charset ('c');
13540 target_s
= lang_hooks
.to_target_charset ('s');
13541 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13545 target_percent_c
[0] = target_percent
;
13546 target_percent_c
[1] = target_c
;
13547 target_percent_c
[2] = '\0';
13549 target_percent_s
[0] = target_percent
;
13550 target_percent_s
[1] = target_s
;
13551 target_percent_s
[2] = '\0';
13553 target_percent_s_newline
[0] = target_percent
;
13554 target_percent_s_newline
[1] = target_s
;
13555 target_percent_s_newline
[2] = target_newline
;
13556 target_percent_s_newline
[3] = '\0';
13563 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13564 and no overflow/underflow occurred. INEXACT is true if M was not
13565 exactly calculated. TYPE is the tree type for the result. This
13566 function assumes that you cleared the MPFR flags and then
13567 calculated M to see if anything subsequently set a flag prior to
13568 entering this function. Return NULL_TREE if any checks fail. */
13571 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13573 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13574 overflow/underflow occurred. If -frounding-math, proceed iff the
13575 result of calling FUNC was exact. */
13576 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13577 && (!flag_rounding_math
|| !inexact
))
13579 REAL_VALUE_TYPE rr
;
13581 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13582 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13583 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13584 but the mpft_t is not, then we underflowed in the
13586 if (real_isfinite (&rr
)
13587 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13589 REAL_VALUE_TYPE rmode
;
13591 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13592 /* Proceed iff the specified mode can hold the value. */
13593 if (real_identical (&rmode
, &rr
))
13594 return build_real (type
, rmode
);
13600 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13601 number and no overflow/underflow occurred. INEXACT is true if M
13602 was not exactly calculated. TYPE is the tree type for the result.
13603 This function assumes that you cleared the MPFR flags and then
13604 calculated M to see if anything subsequently set a flag prior to
13605 entering this function. Return NULL_TREE if any checks fail, if
13606 FORCE_CONVERT is true, then bypass the checks. */
13609 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13611 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13612 overflow/underflow occurred. If -frounding-math, proceed iff the
13613 result of calling FUNC was exact. */
13615 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13616 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13617 && (!flag_rounding_math
|| !inexact
)))
13619 REAL_VALUE_TYPE re
, im
;
13621 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13622 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13623 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13624 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13625 but the mpft_t is not, then we underflowed in the
13628 || (real_isfinite (&re
) && real_isfinite (&im
)
13629 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13630 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13632 REAL_VALUE_TYPE re_mode
, im_mode
;
13634 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13635 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13636 /* Proceed iff the specified mode can hold the value. */
13638 || (real_identical (&re_mode
, &re
)
13639 && real_identical (&im_mode
, &im
)))
13640 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13641 build_real (TREE_TYPE (type
), im_mode
));
13647 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13648 FUNC on it and return the resulting value as a tree with type TYPE.
13649 If MIN and/or MAX are not NULL, then the supplied ARG must be
13650 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13651 acceptable values, otherwise they are not. The mpfr precision is
13652 set to the precision of TYPE. We assume that function FUNC returns
13653 zero if the result could be calculated exactly within the requested
13657 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13658 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13661 tree result
= NULL_TREE
;
13665 /* To proceed, MPFR must exactly represent the target floating point
13666 format, which only happens when the target base equals two. */
13667 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13668 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13670 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13672 if (real_isfinite (ra
)
13673 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13674 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13676 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13677 const int prec
= fmt
->p
;
13678 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13682 mpfr_init2 (m
, prec
);
13683 mpfr_from_real (m
, ra
, GMP_RNDN
);
13684 mpfr_clear_flags ();
13685 inexact
= func (m
, m
, rnd
);
13686 result
= do_mpfr_ckconv (m
, type
, inexact
);
13694 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13695 FUNC on it and return the resulting value as a tree with type TYPE.
13696 The mpfr precision is set to the precision of TYPE. We assume that
13697 function FUNC returns zero if the result could be calculated
13698 exactly within the requested precision. */
13701 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13702 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13704 tree result
= NULL_TREE
;
13709 /* To proceed, MPFR must exactly represent the target floating point
13710 format, which only happens when the target base equals two. */
13711 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13712 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13713 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13715 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13716 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13718 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13720 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13721 const int prec
= fmt
->p
;
13722 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13726 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13727 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13728 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13729 mpfr_clear_flags ();
13730 inexact
= func (m1
, m1
, m2
, rnd
);
13731 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13732 mpfr_clears (m1
, m2
, NULL
);
13739 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13740 FUNC on it and return the resulting value as a tree with type TYPE.
13741 The mpfr precision is set to the precision of TYPE. We assume that
13742 function FUNC returns zero if the result could be calculated
13743 exactly within the requested precision. */
13746 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13747 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13749 tree result
= NULL_TREE
;
13755 /* To proceed, MPFR must exactly represent the target floating point
13756 format, which only happens when the target base equals two. */
13757 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13758 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13759 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13760 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13762 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13763 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13764 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13766 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13768 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13769 const int prec
= fmt
->p
;
13770 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13774 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13775 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13776 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13777 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13778 mpfr_clear_flags ();
13779 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13780 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13781 mpfr_clears (m1
, m2
, m3
, NULL
);
13788 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13789 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13790 If ARG_SINP and ARG_COSP are NULL then the result is returned
13791 as a complex value.
13792 The type is taken from the type of ARG and is used for setting the
13793 precision of the calculation and results. */
13796 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13798 tree
const type
= TREE_TYPE (arg
);
13799 tree result
= NULL_TREE
;
13803 /* To proceed, MPFR must exactly represent the target floating point
13804 format, which only happens when the target base equals two. */
13805 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13806 && TREE_CODE (arg
) == REAL_CST
13807 && !TREE_OVERFLOW (arg
))
13809 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13811 if (real_isfinite (ra
))
13813 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13814 const int prec
= fmt
->p
;
13815 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13816 tree result_s
, result_c
;
13820 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13821 mpfr_from_real (m
, ra
, GMP_RNDN
);
13822 mpfr_clear_flags ();
13823 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13824 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13825 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13826 mpfr_clears (m
, ms
, mc
, NULL
);
13827 if (result_s
&& result_c
)
13829 /* If we are to return in a complex value do so. */
13830 if (!arg_sinp
&& !arg_cosp
)
13831 return build_complex (build_complex_type (type
),
13832 result_c
, result_s
);
13834 /* Dereference the sin/cos pointer arguments. */
13835 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13836 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13837 /* Proceed if valid pointer type were passed in. */
13838 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13839 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13841 /* Set the values. */
13842 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13844 TREE_SIDE_EFFECTS (result_s
) = 1;
13845 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13847 TREE_SIDE_EFFECTS (result_c
) = 1;
13848 /* Combine the assignments into a compound expr. */
13849 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13850 result_s
, result_c
));
13858 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13859 two-argument mpfr order N Bessel function FUNC on them and return
13860 the resulting value as a tree with type TYPE. The mpfr precision
13861 is set to the precision of TYPE. We assume that function FUNC
13862 returns zero if the result could be calculated exactly within the
13863 requested precision. */
13865 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13866 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13867 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13869 tree result
= NULL_TREE
;
13874 /* To proceed, MPFR must exactly represent the target floating point
13875 format, which only happens when the target base equals two. */
13876 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13877 && tree_fits_shwi_p (arg1
)
13878 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13880 const HOST_WIDE_INT n
= tree_to_shwi (arg1
);
13881 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13884 && real_isfinite (ra
)
13885 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13887 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13888 const int prec
= fmt
->p
;
13889 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13893 mpfr_init2 (m
, prec
);
13894 mpfr_from_real (m
, ra
, GMP_RNDN
);
13895 mpfr_clear_flags ();
13896 inexact
= func (m
, n
, m
, rnd
);
13897 result
= do_mpfr_ckconv (m
, type
, inexact
);
13905 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13906 the pointer *(ARG_QUO) and return the result. The type is taken
13907 from the type of ARG0 and is used for setting the precision of the
13908 calculation and results. */
13911 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13913 tree
const type
= TREE_TYPE (arg0
);
13914 tree result
= NULL_TREE
;
13919 /* To proceed, MPFR must exactly represent the target floating point
13920 format, which only happens when the target base equals two. */
13921 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13922 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13923 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13925 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13926 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13928 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13930 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13931 const int prec
= fmt
->p
;
13932 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13937 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13938 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13939 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13940 mpfr_clear_flags ();
13941 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13942 /* Remquo is independent of the rounding mode, so pass
13943 inexact=0 to do_mpfr_ckconv(). */
13944 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13945 mpfr_clears (m0
, m1
, NULL
);
13948 /* MPFR calculates quo in the host's long so it may
13949 return more bits in quo than the target int can hold
13950 if sizeof(host long) > sizeof(target int). This can
13951 happen even for native compilers in LP64 mode. In
13952 these cases, modulo the quo value with the largest
13953 number that the target int can hold while leaving one
13954 bit for the sign. */
13955 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13956 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13958 /* Dereference the quo pointer argument. */
13959 arg_quo
= build_fold_indirect_ref (arg_quo
);
13960 /* Proceed iff a valid pointer type was passed in. */
13961 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13963 /* Set the value. */
13965 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
13966 build_int_cst (TREE_TYPE (arg_quo
),
13968 TREE_SIDE_EFFECTS (result_quo
) = 1;
13969 /* Combine the quo assignment with the rem. */
13970 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13971 result_quo
, result_rem
));
13979 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13980 resulting value as a tree with type TYPE. The mpfr precision is
13981 set to the precision of TYPE. We assume that this mpfr function
13982 returns zero if the result could be calculated exactly within the
13983 requested precision. In addition, the integer pointer represented
13984 by ARG_SG will be dereferenced and set to the appropriate signgam
13988 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13990 tree result
= NULL_TREE
;
13994 /* To proceed, MPFR must exactly represent the target floating point
13995 format, which only happens when the target base equals two. Also
13996 verify ARG is a constant and that ARG_SG is an int pointer. */
13997 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13998 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13999 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
14000 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
14002 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
14004 /* In addition to NaN and Inf, the argument cannot be zero or a
14005 negative integer. */
14006 if (real_isfinite (ra
)
14007 && ra
->cl
!= rvc_zero
14008 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
14010 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
14011 const int prec
= fmt
->p
;
14012 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14017 mpfr_init2 (m
, prec
);
14018 mpfr_from_real (m
, ra
, GMP_RNDN
);
14019 mpfr_clear_flags ();
14020 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
14021 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
14027 /* Dereference the arg_sg pointer argument. */
14028 arg_sg
= build_fold_indirect_ref (arg_sg
);
14029 /* Assign the signgam value into *arg_sg. */
14030 result_sg
= fold_build2 (MODIFY_EXPR
,
14031 TREE_TYPE (arg_sg
), arg_sg
,
14032 build_int_cst (TREE_TYPE (arg_sg
), sg
));
14033 TREE_SIDE_EFFECTS (result_sg
) = 1;
14034 /* Combine the signgam assignment with the lgamma result. */
14035 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
14036 result_sg
, result_lg
));
14044 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14045 function FUNC on it and return the resulting value as a tree with
14046 type TYPE. The mpfr precision is set to the precision of TYPE. We
14047 assume that function FUNC returns zero if the result could be
14048 calculated exactly within the requested precision. */
14051 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
14053 tree result
= NULL_TREE
;
14057 /* To proceed, MPFR must exactly represent the target floating point
14058 format, which only happens when the target base equals two. */
14059 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
14060 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
14061 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
14063 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
14064 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
14066 if (real_isfinite (re
) && real_isfinite (im
))
14068 const struct real_format
*const fmt
=
14069 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14070 const int prec
= fmt
->p
;
14071 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14072 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14076 mpc_init2 (m
, prec
);
14077 mpfr_from_real (mpc_realref (m
), re
, rnd
);
14078 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
14079 mpfr_clear_flags ();
14080 inexact
= func (m
, m
, crnd
);
14081 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
14089 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14090 mpc function FUNC on it and return the resulting value as a tree
14091 with type TYPE. The mpfr precision is set to the precision of
14092 TYPE. We assume that function FUNC returns zero if the result
14093 could be calculated exactly within the requested precision. If
14094 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14095 in the arguments and/or results. */
14098 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
14099 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
14101 tree result
= NULL_TREE
;
14106 /* To proceed, MPFR must exactly represent the target floating point
14107 format, which only happens when the target base equals two. */
14108 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
14109 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
14110 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
14111 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
14112 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
14114 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
14115 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
14116 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
14117 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
14120 || (real_isfinite (re0
) && real_isfinite (im0
)
14121 && real_isfinite (re1
) && real_isfinite (im1
)))
14123 const struct real_format
*const fmt
=
14124 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14125 const int prec
= fmt
->p
;
14126 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14127 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14131 mpc_init2 (m0
, prec
);
14132 mpc_init2 (m1
, prec
);
14133 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
14134 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
14135 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
14136 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
14137 mpfr_clear_flags ();
14138 inexact
= func (m0
, m0
, m1
, crnd
);
14139 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14148 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14149 a normal call should be emitted rather than expanding the function
14150 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14153 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14155 int nargs
= gimple_call_num_args (stmt
);
14157 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14159 ? gimple_call_arg_ptr (stmt
, 0)
14160 : &error_mark_node
), fcode
);
14163 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14164 a normal call should be emitted rather than expanding the function
14165 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14166 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14167 passed as second argument. */
14170 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14171 enum built_in_function fcode
)
14173 int nargs
= gimple_call_num_args (stmt
);
14175 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14177 ? gimple_call_arg_ptr (stmt
, 0)
14178 : &error_mark_node
), maxlen
, fcode
);
14181 /* Builtins with folding operations that operate on "..." arguments
14182 need special handling; we need to store the arguments in a convenient
14183 data structure before attempting any folding. Fortunately there are
14184 only a few builtins that fall into this category. FNDECL is the
14185 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14186 result of the function call is ignored. */
14189 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14190 bool ignore ATTRIBUTE_UNUSED
)
14192 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14193 tree ret
= NULL_TREE
;
14197 case BUILT_IN_SPRINTF_CHK
:
14198 case BUILT_IN_VSPRINTF_CHK
:
14199 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14202 case BUILT_IN_SNPRINTF_CHK
:
14203 case BUILT_IN_VSNPRINTF_CHK
:
14204 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14211 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14212 TREE_NO_WARNING (ret
) = 1;
14218 /* A wrapper function for builtin folding that prevents warnings for
14219 "statement without effect" and the like, caused by removing the
14220 call node earlier than the warning is generated. */
14223 fold_call_stmt (gimple stmt
, bool ignore
)
14225 tree ret
= NULL_TREE
;
14226 tree fndecl
= gimple_call_fndecl (stmt
);
14227 location_t loc
= gimple_location (stmt
);
14229 && TREE_CODE (fndecl
) == FUNCTION_DECL
14230 && DECL_BUILT_IN (fndecl
)
14231 && !gimple_call_va_arg_pack_p (stmt
))
14233 int nargs
= gimple_call_num_args (stmt
);
14234 tree
*args
= (nargs
> 0
14235 ? gimple_call_arg_ptr (stmt
, 0)
14236 : &error_mark_node
);
14238 if (avoid_folding_inline_builtin (fndecl
))
14240 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14242 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14246 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14247 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14249 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14252 /* Propagate location information from original call to
14253 expansion of builtin. Otherwise things like
14254 maybe_emit_chk_warning, that operate on the expansion
14255 of a builtin, will use the wrong location information. */
14256 if (gimple_has_location (stmt
))
14258 tree realret
= ret
;
14259 if (TREE_CODE (ret
) == NOP_EXPR
)
14260 realret
= TREE_OPERAND (ret
, 0);
14261 if (CAN_HAVE_LOCATION_P (realret
)
14262 && !EXPR_HAS_LOCATION (realret
))
14263 SET_EXPR_LOCATION (realret
, loc
);
14273 /* Look up the function in builtin_decl that corresponds to DECL
14274 and set ASMSPEC as its user assembler name. DECL must be a
14275 function decl that declares a builtin. */
14278 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14281 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14282 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14285 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14286 set_user_assembler_name (builtin
, asmspec
);
14287 switch (DECL_FUNCTION_CODE (decl
))
14289 case BUILT_IN_MEMCPY
:
14290 init_block_move_fn (asmspec
);
14291 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14293 case BUILT_IN_MEMSET
:
14294 init_block_clear_fn (asmspec
);
14295 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14297 case BUILT_IN_MEMMOVE
:
14298 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14300 case BUILT_IN_MEMCMP
:
14301 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14303 case BUILT_IN_ABORT
:
14304 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14307 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14309 set_user_assembler_libfunc ("ffs", asmspec
);
14310 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14311 MODE_INT
, 0), "ffs");
14319 /* Return true if DECL is a builtin that expands to a constant or similarly
14322 is_simple_builtin (tree decl
)
14324 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14325 switch (DECL_FUNCTION_CODE (decl
))
14327 /* Builtins that expand to constants. */
14328 case BUILT_IN_CONSTANT_P
:
14329 case BUILT_IN_EXPECT
:
14330 case BUILT_IN_OBJECT_SIZE
:
14331 case BUILT_IN_UNREACHABLE
:
14332 /* Simple register moves or loads from stack. */
14333 case BUILT_IN_ASSUME_ALIGNED
:
14334 case BUILT_IN_RETURN_ADDRESS
:
14335 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14336 case BUILT_IN_FROB_RETURN_ADDR
:
14337 case BUILT_IN_RETURN
:
14338 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14339 case BUILT_IN_FRAME_ADDRESS
:
14340 case BUILT_IN_VA_END
:
14341 case BUILT_IN_STACK_SAVE
:
14342 case BUILT_IN_STACK_RESTORE
:
14343 /* Exception state returns or moves registers around. */
14344 case BUILT_IN_EH_FILTER
:
14345 case BUILT_IN_EH_POINTER
:
14346 case BUILT_IN_EH_COPY_VALUES
:
14356 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14357 most probably expanded inline into reasonably simple code. This is a
14358 superset of is_simple_builtin. */
14360 is_inexpensive_builtin (tree decl
)
14364 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14366 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14367 switch (DECL_FUNCTION_CODE (decl
))
14370 case BUILT_IN_ALLOCA
:
14371 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14372 case BUILT_IN_BSWAP16
:
14373 case BUILT_IN_BSWAP32
:
14374 case BUILT_IN_BSWAP64
:
14376 case BUILT_IN_CLZIMAX
:
14377 case BUILT_IN_CLZL
:
14378 case BUILT_IN_CLZLL
:
14380 case BUILT_IN_CTZIMAX
:
14381 case BUILT_IN_CTZL
:
14382 case BUILT_IN_CTZLL
:
14384 case BUILT_IN_FFSIMAX
:
14385 case BUILT_IN_FFSL
:
14386 case BUILT_IN_FFSLL
:
14387 case BUILT_IN_IMAXABS
:
14388 case BUILT_IN_FINITE
:
14389 case BUILT_IN_FINITEF
:
14390 case BUILT_IN_FINITEL
:
14391 case BUILT_IN_FINITED32
:
14392 case BUILT_IN_FINITED64
:
14393 case BUILT_IN_FINITED128
:
14394 case BUILT_IN_FPCLASSIFY
:
14395 case BUILT_IN_ISFINITE
:
14396 case BUILT_IN_ISINF_SIGN
:
14397 case BUILT_IN_ISINF
:
14398 case BUILT_IN_ISINFF
:
14399 case BUILT_IN_ISINFL
:
14400 case BUILT_IN_ISINFD32
:
14401 case BUILT_IN_ISINFD64
:
14402 case BUILT_IN_ISINFD128
:
14403 case BUILT_IN_ISNAN
:
14404 case BUILT_IN_ISNANF
:
14405 case BUILT_IN_ISNANL
:
14406 case BUILT_IN_ISNAND32
:
14407 case BUILT_IN_ISNAND64
:
14408 case BUILT_IN_ISNAND128
:
14409 case BUILT_IN_ISNORMAL
:
14410 case BUILT_IN_ISGREATER
:
14411 case BUILT_IN_ISGREATEREQUAL
:
14412 case BUILT_IN_ISLESS
:
14413 case BUILT_IN_ISLESSEQUAL
:
14414 case BUILT_IN_ISLESSGREATER
:
14415 case BUILT_IN_ISUNORDERED
:
14416 case BUILT_IN_VA_ARG_PACK
:
14417 case BUILT_IN_VA_ARG_PACK_LEN
:
14418 case BUILT_IN_VA_COPY
:
14419 case BUILT_IN_TRAP
:
14420 case BUILT_IN_SAVEREGS
:
14421 case BUILT_IN_POPCOUNTL
:
14422 case BUILT_IN_POPCOUNTLL
:
14423 case BUILT_IN_POPCOUNTIMAX
:
14424 case BUILT_IN_POPCOUNT
:
14425 case BUILT_IN_PARITYL
:
14426 case BUILT_IN_PARITYLL
:
14427 case BUILT_IN_PARITYIMAX
:
14428 case BUILT_IN_PARITY
:
14429 case BUILT_IN_LABS
:
14430 case BUILT_IN_LLABS
:
14431 case BUILT_IN_PREFETCH
:
14435 return is_simple_builtin (decl
);