1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
40 #include "typeclass.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-ssanames.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
55 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
57 struct target_builtins default_target_builtins
;
59 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names
[BUILT_IN_LAST
]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names
[(int) END_BUILTINS
] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 builtin_info_type builtin_info
;
77 /* Non-zero if __builtin_constant_p should be folded right away. */
78 bool force_folding_builtin_constant_p
;
80 static const char *c_getstr (tree
);
81 static rtx
c_readstr (const char *, enum machine_mode
);
82 static int target_char_cast (tree
, char *);
83 static rtx
get_memory_rtx (tree
, tree
);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx
result_vector (int, rtx
);
89 static void expand_builtin_update_setjmp_buf (rtx
);
90 static void expand_builtin_prefetch (tree
);
91 static rtx
expand_builtin_apply_args (void);
92 static rtx
expand_builtin_apply_args_1 (void);
93 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
94 static void expand_builtin_return (rtx
);
95 static enum type_class
type_to_class (tree
);
96 static rtx
expand_builtin_classify_type (tree
);
97 static void expand_errno_check (tree
, rtx
);
98 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
100 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
103 static rtx
expand_builtin_sincos (tree
);
104 static rtx
expand_builtin_cexpi (tree
, rtx
);
105 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
106 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
107 static rtx
expand_builtin_next_arg (void);
108 static rtx
expand_builtin_va_start (tree
);
109 static rtx
expand_builtin_va_end (tree
);
110 static rtx
expand_builtin_va_copy (tree
);
111 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
112 static rtx
expand_builtin_strcmp (tree
, rtx
);
113 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
114 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
115 static rtx
expand_builtin_memcpy (tree
, rtx
);
116 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
118 enum machine_mode
, int);
119 static rtx
expand_builtin_strcpy (tree
, rtx
);
120 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
121 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
122 static rtx
expand_builtin_strncpy (tree
, rtx
);
123 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
124 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
125 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
126 static rtx
expand_builtin_bzero (tree
);
127 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_alloca (tree
, bool);
129 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
130 static rtx
expand_builtin_frame_address (tree
, tree
);
131 static tree
stabilize_va_list_loc (location_t
, tree
, int);
132 static rtx
expand_builtin_expect (tree
, rtx
);
133 static tree
fold_builtin_constant_p (tree
);
134 static tree
fold_builtin_expect (location_t
, tree
, tree
);
135 static tree
fold_builtin_classify_type (tree
);
136 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
137 static tree
fold_builtin_inf (location_t
, tree
, int);
138 static tree
fold_builtin_nan (tree
, tree
, int);
139 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
140 static bool validate_arg (const_tree
, enum tree_code code
);
141 static bool integer_valued_real_p (tree
);
142 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
143 static bool readonly_data_expr (tree
);
144 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
145 static rtx
expand_builtin_signbit (tree
, rtx
);
146 static tree
fold_builtin_sqrt (location_t
, tree
, tree
);
147 static tree
fold_builtin_cbrt (location_t
, tree
, tree
);
148 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
149 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
150 static tree
fold_builtin_cos (location_t
, tree
, tree
, tree
);
151 static tree
fold_builtin_cosh (location_t
, tree
, tree
, tree
);
152 static tree
fold_builtin_tan (tree
, tree
);
153 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
154 static tree
fold_builtin_floor (location_t
, tree
, tree
);
155 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
156 static tree
fold_builtin_round (location_t
, tree
, tree
);
157 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
158 static tree
fold_builtin_bitop (tree
, tree
);
159 static tree
fold_builtin_memory_op (location_t
, tree
, tree
, tree
, tree
, bool, int);
160 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
161 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
162 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
163 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
164 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
165 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
166 static tree
fold_builtin_copysign (location_t
, tree
, tree
, tree
, tree
);
167 static tree
fold_builtin_isascii (location_t
, tree
);
168 static tree
fold_builtin_toascii (location_t
, tree
);
169 static tree
fold_builtin_isdigit (location_t
, tree
);
170 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
171 static tree
fold_builtin_abs (location_t
, tree
, tree
);
172 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
174 static tree
fold_builtin_n (location_t
, tree
, tree
*, int, bool);
175 static tree
fold_builtin_0 (location_t
, tree
, bool);
176 static tree
fold_builtin_1 (location_t
, tree
, tree
, bool);
177 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
, bool);
178 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
, bool);
179 static tree
fold_builtin_4 (location_t
, tree
, tree
, tree
, tree
, tree
, bool);
180 static tree
fold_builtin_varargs (location_t
, tree
, tree
, bool);
182 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
183 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
184 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_strcat (location_t
, tree
, tree
);
186 static tree
fold_builtin_strncat (location_t
, tree
, tree
, tree
);
187 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
188 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
189 static tree
fold_builtin_sprintf (location_t
, tree
, tree
, tree
, int);
190 static tree
fold_builtin_snprintf (location_t
, tree
, tree
, tree
, tree
, int);
192 static rtx
expand_builtin_object_size (tree
);
193 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
194 enum built_in_function
);
195 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
196 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
197 static void maybe_emit_free_warning (tree
);
198 static tree
fold_builtin_object_size (tree
, tree
);
199 static tree
fold_builtin_strcat_chk (location_t
, tree
, tree
, tree
, tree
);
200 static tree
fold_builtin_strncat_chk (location_t
, tree
, tree
, tree
, tree
, tree
);
201 static tree
fold_builtin_sprintf_chk (location_t
, tree
, enum built_in_function
);
202 static tree
fold_builtin_printf (location_t
, tree
, tree
, tree
, bool, enum built_in_function
);
203 static tree
fold_builtin_fprintf (location_t
, tree
, tree
, tree
, tree
, bool,
204 enum built_in_function
);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline
;
208 static unsigned HOST_WIDE_INT target_percent
;
209 static unsigned HOST_WIDE_INT target_c
;
210 static unsigned HOST_WIDE_INT target_s
;
211 static char target_percent_c
[3];
212 static char target_percent_s
[3];
213 static char target_percent_s_newline
[4];
214 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
215 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
216 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
218 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
219 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
220 static tree
do_mpfr_sincos (tree
, tree
, tree
);
221 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
222 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
223 const REAL_VALUE_TYPE
*, bool);
224 static tree
do_mpfr_remquo (tree
, tree
, tree
);
225 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
226 static void expand_builtin_sync_synchronize (void);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name
)
233 if (strncmp (name
, "__builtin_", 10) == 0)
235 if (strncmp (name
, "__sync_", 7) == 0)
237 if (strncmp (name
, "__atomic_", 9) == 0)
239 if (flag_enable_cilkplus
240 && (!strcmp (name
, "__cilkrts_detach")
241 || !strcmp (name
, "__cilkrts_pop_frame")))
247 /* Return true if DECL is a function symbol representing a built-in. */
250 is_builtin_fn (tree decl
)
252 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
255 /* By default we assume that c99 functions are present at the runtime,
256 but sincos is not. */
258 default_libc_has_function (enum function_class fn_class
)
260 if (fn_class
== function_c94
261 || fn_class
== function_c99_misc
262 || fn_class
== function_c99_math_complex
)
269 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
275 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
280 /* Return true if NODE should be considered for inline expansion regardless
281 of the optimization level. This means whenever a function is invoked with
282 its "internal" name, which normally contains the prefix "__builtin". */
285 called_as_built_in (tree node
)
287 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
288 we want the name used to call the function, not the name it
290 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
291 return is_builtin_name (name
);
294 /* Compute values M and N such that M divides (address of EXP - N) and such
295 that N < M. If these numbers can be determined, store M in alignp and N in
296 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
297 *alignp and any bit-offset to *bitposp.
299 Note that the address (and thus the alignment) computed here is based
300 on the address to which a symbol resolves, whereas DECL_ALIGN is based
301 on the address at which an object is actually located. These two
302 addresses are not always the same. For example, on ARM targets,
303 the address &foo of a Thumb function foo() has the lowest bit set,
304 whereas foo() itself starts on an even address.
306 If ADDR_P is true we are taking the address of the memory reference EXP
307 and thus cannot rely on the access taking place. */
310 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
311 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
313 HOST_WIDE_INT bitsize
, bitpos
;
315 enum machine_mode mode
;
316 int unsignedp
, volatilep
;
317 unsigned int align
= BITS_PER_UNIT
;
318 bool known_alignment
= false;
320 /* Get the innermost object and the constant (bitpos) and possibly
321 variable (offset) offset of the access. */
322 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
323 &mode
, &unsignedp
, &volatilep
, true);
325 /* Extract alignment information from the innermost object and
326 possibly adjust bitpos and offset. */
327 if (TREE_CODE (exp
) == FUNCTION_DECL
)
329 /* Function addresses can encode extra information besides their
330 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
331 allows the low bit to be used as a virtual bit, we know
332 that the address itself must be at least 2-byte aligned. */
333 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
334 align
= 2 * BITS_PER_UNIT
;
336 else if (TREE_CODE (exp
) == LABEL_DECL
)
338 else if (TREE_CODE (exp
) == CONST_DECL
)
340 /* The alignment of a CONST_DECL is determined by its initializer. */
341 exp
= DECL_INITIAL (exp
);
342 align
= TYPE_ALIGN (TREE_TYPE (exp
));
343 #ifdef CONSTANT_ALIGNMENT
344 if (CONSTANT_CLASS_P (exp
))
345 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
347 known_alignment
= true;
349 else if (DECL_P (exp
))
351 align
= DECL_ALIGN (exp
);
352 known_alignment
= true;
354 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
356 align
= TYPE_ALIGN (TREE_TYPE (exp
));
358 else if (TREE_CODE (exp
) == INDIRECT_REF
359 || TREE_CODE (exp
) == MEM_REF
360 || TREE_CODE (exp
) == TARGET_MEM_REF
)
362 tree addr
= TREE_OPERAND (exp
, 0);
364 unsigned HOST_WIDE_INT ptr_bitpos
;
366 if (TREE_CODE (addr
) == BIT_AND_EXPR
367 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
369 align
= (TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1))
370 & -TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1)));
371 align
*= BITS_PER_UNIT
;
372 addr
= TREE_OPERAND (addr
, 0);
376 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
377 align
= MAX (ptr_align
, align
);
379 /* The alignment of the pointer operand in a TARGET_MEM_REF
380 has to take the variable offset parts into account. */
381 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
385 unsigned HOST_WIDE_INT step
= 1;
387 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
388 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
390 if (TMR_INDEX2 (exp
))
391 align
= BITS_PER_UNIT
;
392 known_alignment
= false;
395 /* When EXP is an actual memory reference then we can use
396 TYPE_ALIGN of a pointer indirection to derive alignment.
397 Do so only if get_pointer_alignment_1 did not reveal absolute
398 alignment knowledge and if using that alignment would
399 improve the situation. */
400 if (!addr_p
&& !known_alignment
401 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
402 align
= TYPE_ALIGN (TREE_TYPE (exp
));
405 /* Else adjust bitpos accordingly. */
406 bitpos
+= ptr_bitpos
;
407 if (TREE_CODE (exp
) == MEM_REF
408 || TREE_CODE (exp
) == TARGET_MEM_REF
)
409 bitpos
+= mem_ref_offset (exp
).low
* BITS_PER_UNIT
;
412 else if (TREE_CODE (exp
) == STRING_CST
)
414 /* STRING_CST are the only constant objects we allow to be not
415 wrapped inside a CONST_DECL. */
416 align
= TYPE_ALIGN (TREE_TYPE (exp
));
417 #ifdef CONSTANT_ALIGNMENT
418 if (CONSTANT_CLASS_P (exp
))
419 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
421 known_alignment
= true;
424 /* If there is a non-constant offset part extract the maximum
425 alignment that can prevail. */
428 int trailing_zeros
= tree_ctz (offset
);
429 if (trailing_zeros
< HOST_BITS_PER_INT
)
431 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
433 align
= MIN (align
, inner
);
438 *bitposp
= bitpos
& (*alignp
- 1);
439 return known_alignment
;
442 /* For a memory reference expression EXP compute values M and N such that M
443 divides (&EXP - N) and such that N < M. If these numbers can be determined,
444 store M in alignp and N in *BITPOSP and return true. Otherwise return false
445 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
448 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
449 unsigned HOST_WIDE_INT
*bitposp
)
451 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
454 /* Return the alignment in bits of EXP, an object. */
457 get_object_alignment (tree exp
)
459 unsigned HOST_WIDE_INT bitpos
= 0;
462 get_object_alignment_1 (exp
, &align
, &bitpos
);
464 /* align and bitpos now specify known low bits of the pointer.
465 ptr & (align - 1) == bitpos. */
468 align
= (bitpos
& -bitpos
);
472 /* For a pointer valued expression EXP compute values M and N such that M
473 divides (EXP - N) and such that N < M. If these numbers can be determined,
474 store M in alignp and N in *BITPOSP and return true. Return false if
475 the results are just a conservative approximation.
477 If EXP is not a pointer, false is returned too. */
480 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
481 unsigned HOST_WIDE_INT
*bitposp
)
485 if (TREE_CODE (exp
) == ADDR_EXPR
)
486 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
487 alignp
, bitposp
, true);
488 else if (TREE_CODE (exp
) == SSA_NAME
489 && POINTER_TYPE_P (TREE_TYPE (exp
)))
491 unsigned int ptr_align
, ptr_misalign
;
492 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
494 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
496 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
497 *alignp
= ptr_align
* BITS_PER_UNIT
;
498 /* We cannot really tell whether this result is an approximation. */
504 *alignp
= BITS_PER_UNIT
;
508 else if (TREE_CODE (exp
) == INTEGER_CST
)
510 *alignp
= BIGGEST_ALIGNMENT
;
511 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
512 & (BIGGEST_ALIGNMENT
- 1));
517 *alignp
= BITS_PER_UNIT
;
521 /* Return the alignment in bits of EXP, a pointer valued expression.
522 The alignment returned is, by default, the alignment of the thing that
523 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
525 Otherwise, look at the expression to see if we can do better, i.e., if the
526 expression is actually pointing at an object whose alignment is tighter. */
529 get_pointer_alignment (tree exp
)
531 unsigned HOST_WIDE_INT bitpos
= 0;
534 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
536 /* align and bitpos now specify known low bits of the pointer.
537 ptr & (align - 1) == bitpos. */
540 align
= (bitpos
& -bitpos
);
545 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
546 way, because it could contain a zero byte in the middle.
547 TREE_STRING_LENGTH is the size of the character array, not the string.
549 ONLY_VALUE should be nonzero if the result is not going to be emitted
550 into the instruction stream and zero if it is going to be expanded.
551 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
552 is returned, otherwise NULL, since
553 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
554 evaluate the side-effects.
556 The value returned is of type `ssizetype'.
558 Unfortunately, string_constant can't access the values of const char
559 arrays with initializers, so neither can we do so here. */
562 c_strlen (tree src
, int only_value
)
565 HOST_WIDE_INT offset
;
571 if (TREE_CODE (src
) == COND_EXPR
572 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
576 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
577 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
578 if (tree_int_cst_equal (len1
, len2
))
582 if (TREE_CODE (src
) == COMPOUND_EXPR
583 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
584 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
586 loc
= EXPR_LOC_OR_HERE (src
);
588 src
= string_constant (src
, &offset_node
);
592 max
= TREE_STRING_LENGTH (src
) - 1;
593 ptr
= TREE_STRING_POINTER (src
);
595 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
597 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
598 compute the offset to the following null if we don't know where to
599 start searching for it. */
602 for (i
= 0; i
< max
; i
++)
606 /* We don't know the starting offset, but we do know that the string
607 has no internal zero bytes. We can assume that the offset falls
608 within the bounds of the string; otherwise, the programmer deserves
609 what he gets. Subtract the offset from the length of the string,
610 and return that. This would perhaps not be valid if we were dealing
611 with named arrays in addition to literal string constants. */
613 return size_diffop_loc (loc
, size_int (max
), offset_node
);
616 /* We have a known offset into the string. Start searching there for
617 a null character if we can represent it as a single HOST_WIDE_INT. */
618 if (offset_node
== 0)
620 else if (! host_integerp (offset_node
, 0))
623 offset
= tree_low_cst (offset_node
, 0);
625 /* If the offset is known to be out of bounds, warn, and call strlen at
627 if (offset
< 0 || offset
> max
)
629 /* Suppress multiple warnings for propagated constant strings. */
630 if (! TREE_NO_WARNING (src
))
632 warning_at (loc
, 0, "offset outside bounds of constant string");
633 TREE_NO_WARNING (src
) = 1;
638 /* Use strlen to search for the first zero byte. Since any strings
639 constructed with build_string will have nulls appended, we win even
640 if we get handed something like (char[4])"abcd".
642 Since OFFSET is our starting index into the string, no further
643 calculation is needed. */
644 return ssize_int (strlen (ptr
+ offset
));
647 /* Return a char pointer for a C string if it is a string constant
648 or sum of string constant and integer constant. */
655 src
= string_constant (src
, &offset_node
);
659 if (offset_node
== 0)
660 return TREE_STRING_POINTER (src
);
661 else if (!host_integerp (offset_node
, 1)
662 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
665 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
668 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
672 c_readstr (const char *str
, enum machine_mode mode
)
678 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
683 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
686 if (WORDS_BIG_ENDIAN
)
687 j
= GET_MODE_SIZE (mode
) - i
- 1;
688 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
689 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
690 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
692 gcc_assert (j
< HOST_BITS_PER_DOUBLE_INT
);
695 ch
= (unsigned char) str
[i
];
696 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
698 return immed_double_const (c
[0], c
[1], mode
);
701 /* Cast a target constant CST to target CHAR and if that value fits into
702 host char type, return zero and put that value into variable pointed to by
706 target_char_cast (tree cst
, char *p
)
708 unsigned HOST_WIDE_INT val
, hostval
;
710 if (TREE_CODE (cst
) != INTEGER_CST
711 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
714 val
= TREE_INT_CST_LOW (cst
);
715 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
716 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
719 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
720 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
729 /* Similar to save_expr, but assumes that arbitrary code is not executed
730 in between the multiple evaluations. In particular, we assume that a
731 non-addressable local variable will not be modified. */
734 builtin_save_expr (tree exp
)
736 if (TREE_CODE (exp
) == SSA_NAME
737 || (TREE_ADDRESSABLE (exp
) == 0
738 && (TREE_CODE (exp
) == PARM_DECL
739 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
742 return save_expr (exp
);
745 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
746 times to get the address of either a higher stack frame, or a return
747 address located within it (depending on FNDECL_CODE). */
750 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
754 #ifdef INITIAL_FRAME_ADDRESS_RTX
755 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
759 /* For a zero count with __builtin_return_address, we don't care what
760 frame address we return, because target-specific definitions will
761 override us. Therefore frame pointer elimination is OK, and using
762 the soft frame pointer is OK.
764 For a nonzero count, or a zero count with __builtin_frame_address,
765 we require a stable offset from the current frame pointer to the
766 previous one, so we must use the hard frame pointer, and
767 we must disable frame pointer elimination. */
768 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
769 tem
= frame_pointer_rtx
;
772 tem
= hard_frame_pointer_rtx
;
774 /* Tell reload not to eliminate the frame pointer. */
775 crtl
->accesses_prior_frames
= 1;
779 /* Some machines need special handling before we can access
780 arbitrary frames. For example, on the SPARC, we must first flush
781 all register windows to the stack. */
782 #ifdef SETUP_FRAME_ADDRESSES
784 SETUP_FRAME_ADDRESSES ();
787 /* On the SPARC, the return address is not in the frame, it is in a
788 register. There is no way to access it off of the current frame
789 pointer, but it can be accessed off the previous frame pointer by
790 reading the value from the register window save area. */
791 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
792 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
796 /* Scan back COUNT frames to the specified frame. */
797 for (i
= 0; i
< count
; i
++)
799 /* Assume the dynamic chain pointer is in the word that the
800 frame address points to, unless otherwise specified. */
801 #ifdef DYNAMIC_CHAIN_ADDRESS
802 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
804 tem
= memory_address (Pmode
, tem
);
805 tem
= gen_frame_mem (Pmode
, tem
);
806 tem
= copy_to_reg (tem
);
809 /* For __builtin_frame_address, return what we've got. But, on
810 the SPARC for example, we may have to add a bias. */
811 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
812 #ifdef FRAME_ADDR_RTX
813 return FRAME_ADDR_RTX (tem
);
818 /* For __builtin_return_address, get the return address from that frame. */
819 #ifdef RETURN_ADDR_RTX
820 tem
= RETURN_ADDR_RTX (count
, tem
);
822 tem
= memory_address (Pmode
,
823 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
824 tem
= gen_frame_mem (Pmode
, tem
);
829 /* Alias set used for setjmp buffer. */
830 static alias_set_type setjmp_alias_set
= -1;
832 /* Construct the leading half of a __builtin_setjmp call. Control will
833 return to RECEIVER_LABEL. This is also called directly by the SJLJ
834 exception handling code. */
837 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
839 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
843 if (setjmp_alias_set
== -1)
844 setjmp_alias_set
= new_alias_set ();
846 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
848 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
850 /* We store the frame pointer and the address of receiver_label in
851 the buffer and use the rest of it for the stack save area, which
852 is machine-dependent. */
854 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
855 set_mem_alias_set (mem
, setjmp_alias_set
);
856 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
858 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
859 GET_MODE_SIZE (Pmode
))),
860 set_mem_alias_set (mem
, setjmp_alias_set
);
862 emit_move_insn (validize_mem (mem
),
863 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
865 stack_save
= gen_rtx_MEM (sa_mode
,
866 plus_constant (Pmode
, buf_addr
,
867 2 * GET_MODE_SIZE (Pmode
)));
868 set_mem_alias_set (stack_save
, setjmp_alias_set
);
869 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
871 /* If there is further processing to do, do it. */
872 #ifdef HAVE_builtin_setjmp_setup
873 if (HAVE_builtin_setjmp_setup
)
874 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
877 /* We have a nonlocal label. */
878 cfun
->has_nonlocal_label
= 1;
881 /* Construct the trailing part of a __builtin_setjmp call. This is
882 also called directly by the SJLJ exception handling code.
883 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
886 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
890 /* Mark the FP as used when we get here, so we have to make sure it's
891 marked as used by this function. */
892 emit_use (hard_frame_pointer_rtx
);
894 /* Mark the static chain as clobbered here so life information
895 doesn't get messed up for it. */
896 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
897 if (chain
&& REG_P (chain
))
898 emit_clobber (chain
);
900 /* Now put in the code to restore the frame pointer, and argument
901 pointer, if needed. */
902 #ifdef HAVE_nonlocal_goto
903 if (! HAVE_nonlocal_goto
)
905 /* First adjust our frame pointer to its actual value. It was
906 previously set to the start of the virtual area corresponding to
907 the stacked variables when we branched here and now needs to be
908 adjusted to the actual hardware fp value.
910 Assignments to virtual registers are converted by
911 instantiate_virtual_regs into the corresponding assignment
912 to the underlying register (fp in this case) that makes
913 the original assignment true.
914 So the following insn will actually be decrementing fp by
915 STARTING_FRAME_OFFSET. */
916 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
918 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
919 if (fixed_regs
[ARG_POINTER_REGNUM
])
921 #ifdef ELIMINABLE_REGS
922 /* If the argument pointer can be eliminated in favor of the
923 frame pointer, we don't need to restore it. We assume here
924 that if such an elimination is present, it can always be used.
925 This is the case on all known machines; if we don't make this
926 assumption, we do unnecessary saving on many machines. */
928 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
930 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
931 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
932 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
935 if (i
== ARRAY_SIZE (elim_regs
))
938 /* Now restore our arg pointer from the address at which it
939 was saved in our stack frame. */
940 emit_move_insn (crtl
->args
.internal_arg_pointer
,
941 copy_to_reg (get_arg_pointer_save_area ()));
946 #ifdef HAVE_builtin_setjmp_receiver
947 if (receiver_label
!= NULL
&& HAVE_builtin_setjmp_receiver
)
948 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
951 #ifdef HAVE_nonlocal_goto_receiver
952 if (HAVE_nonlocal_goto_receiver
)
953 emit_insn (gen_nonlocal_goto_receiver ());
958 /* We must not allow the code we just generated to be reordered by
959 scheduling. Specifically, the update of the frame pointer must
960 happen immediately, not later. Similarly, we must block
961 (frame-related) register values to be used across this code. */
962 emit_insn (gen_blockage ());
965 /* __builtin_longjmp is passed a pointer to an array of five words (not
966 all will be used on all machines). It operates similarly to the C
967 library function of the same name, but is more efficient. Much of
968 the code below is copied from the handling of non-local gotos. */
971 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
973 rtx fp
, lab
, stack
, insn
, last
;
974 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
976 /* DRAP is needed for stack realign if longjmp is expanded to current
978 if (SUPPORTS_STACK_ALIGNMENT
)
979 crtl
->need_drap
= true;
981 if (setjmp_alias_set
== -1)
982 setjmp_alias_set
= new_alias_set ();
984 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
986 buf_addr
= force_reg (Pmode
, buf_addr
);
988 /* We require that the user must pass a second argument of 1, because
989 that is what builtin_setjmp will return. */
990 gcc_assert (value
== const1_rtx
);
992 last
= get_last_insn ();
993 #ifdef HAVE_builtin_longjmp
994 if (HAVE_builtin_longjmp
)
995 emit_insn (gen_builtin_longjmp (buf_addr
));
999 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1000 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1001 GET_MODE_SIZE (Pmode
)));
1003 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1004 2 * GET_MODE_SIZE (Pmode
)));
1005 set_mem_alias_set (fp
, setjmp_alias_set
);
1006 set_mem_alias_set (lab
, setjmp_alias_set
);
1007 set_mem_alias_set (stack
, setjmp_alias_set
);
1009 /* Pick up FP, label, and SP from the block and jump. This code is
1010 from expand_goto in stmt.c; see there for detailed comments. */
1011 #ifdef HAVE_nonlocal_goto
1012 if (HAVE_nonlocal_goto
)
1013 /* We have to pass a value to the nonlocal_goto pattern that will
1014 get copied into the static_chain pointer, but it does not matter
1015 what that value is, because builtin_setjmp does not use it. */
1016 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
1020 lab
= copy_to_reg (lab
);
1022 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1023 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1025 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1026 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1028 emit_use (hard_frame_pointer_rtx
);
1029 emit_use (stack_pointer_rtx
);
1030 emit_indirect_jump (lab
);
1034 /* Search backwards and mark the jump insn as a non-local goto.
1035 Note that this precludes the use of __builtin_longjmp to a
1036 __builtin_setjmp target in the same function. However, we've
1037 already cautioned the user that these functions are for
1038 internal exception handling use only. */
1039 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1041 gcc_assert (insn
!= last
);
1045 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1048 else if (CALL_P (insn
))
1053 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1054 and the address of the save area. */
1057 expand_builtin_nonlocal_goto (tree exp
)
1059 tree t_label
, t_save_area
;
1060 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
1062 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1065 t_label
= CALL_EXPR_ARG (exp
, 0);
1066 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1068 r_label
= expand_normal (t_label
);
1069 r_label
= convert_memory_address (Pmode
, r_label
);
1070 r_save_area
= expand_normal (t_save_area
);
1071 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1072 /* Copy the address of the save location to a register just in case it was
1073 based on the frame pointer. */
1074 r_save_area
= copy_to_reg (r_save_area
);
1075 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1076 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1077 plus_constant (Pmode
, r_save_area
,
1078 GET_MODE_SIZE (Pmode
)));
1080 crtl
->has_nonlocal_goto
= 1;
1082 #ifdef HAVE_nonlocal_goto
1083 /* ??? We no longer need to pass the static chain value, afaik. */
1084 if (HAVE_nonlocal_goto
)
1085 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1089 r_label
= copy_to_reg (r_label
);
1091 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1092 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1094 /* Restore frame pointer for containing function. */
1095 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1096 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1098 /* USE of hard_frame_pointer_rtx added for consistency;
1099 not clear if really needed. */
1100 emit_use (hard_frame_pointer_rtx
);
1101 emit_use (stack_pointer_rtx
);
1103 /* If the architecture is using a GP register, we must
1104 conservatively assume that the target function makes use of it.
1105 The prologue of functions with nonlocal gotos must therefore
1106 initialize the GP register to the appropriate value, and we
1107 must then make sure that this value is live at the point
1108 of the jump. (Note that this doesn't necessarily apply
1109 to targets with a nonlocal_goto pattern; they are free
1110 to implement it in their own way. Note also that this is
1111 a no-op if the GP register is a global invariant.) */
1112 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1113 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1114 emit_use (pic_offset_table_rtx
);
1116 emit_indirect_jump (r_label
);
1119 /* Search backwards to the jump insn and mark it as a
1121 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1125 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1128 else if (CALL_P (insn
))
1135 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1136 (not all will be used on all machines) that was passed to __builtin_setjmp.
1137 It updates the stack pointer in that block to correspond to the current
1141 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1143 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1145 = gen_rtx_MEM (sa_mode
,
1148 plus_constant (Pmode
, buf_addr
,
1149 2 * GET_MODE_SIZE (Pmode
))));
1151 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1154 /* Expand a call to __builtin_prefetch. For a target that does not support
1155 data prefetch, evaluate the memory address argument in case it has side
1159 expand_builtin_prefetch (tree exp
)
1161 tree arg0
, arg1
, arg2
;
1165 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1168 arg0
= CALL_EXPR_ARG (exp
, 0);
1170 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1171 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1173 nargs
= call_expr_nargs (exp
);
1175 arg1
= CALL_EXPR_ARG (exp
, 1);
1177 arg1
= integer_zero_node
;
1179 arg2
= CALL_EXPR_ARG (exp
, 2);
1181 arg2
= integer_three_node
;
1183 /* Argument 0 is an address. */
1184 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1186 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1187 if (TREE_CODE (arg1
) != INTEGER_CST
)
1189 error ("second argument to %<__builtin_prefetch%> must be a constant");
1190 arg1
= integer_zero_node
;
1192 op1
= expand_normal (arg1
);
1193 /* Argument 1 must be either zero or one. */
1194 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1196 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1201 /* Argument 2 (locality) must be a compile-time constant int. */
1202 if (TREE_CODE (arg2
) != INTEGER_CST
)
1204 error ("third argument to %<__builtin_prefetch%> must be a constant");
1205 arg2
= integer_zero_node
;
1207 op2
= expand_normal (arg2
);
1208 /* Argument 2 must be 0, 1, 2, or 3. */
1209 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1211 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1215 #ifdef HAVE_prefetch
1218 struct expand_operand ops
[3];
1220 create_address_operand (&ops
[0], op0
);
1221 create_integer_operand (&ops
[1], INTVAL (op1
));
1222 create_integer_operand (&ops
[2], INTVAL (op2
));
1223 if (maybe_expand_insn (CODE_FOR_prefetch
, 3, ops
))
1228 /* Don't do anything with direct references to volatile memory, but
1229 generate code to handle other side effects. */
1230 if (!MEM_P (op0
) && side_effects_p (op0
))
1234 /* Get a MEM rtx for expression EXP which is the address of an operand
1235 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1236 the maximum length of the block of memory that might be accessed or
1240 get_memory_rtx (tree exp
, tree len
)
1242 tree orig_exp
= exp
;
1245 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1246 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1247 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1248 exp
= TREE_OPERAND (exp
, 0);
1250 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1251 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1253 /* Get an expression we can use to find the attributes to assign to MEM.
1254 First remove any nops. */
1255 while (CONVERT_EXPR_P (exp
)
1256 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1257 exp
= TREE_OPERAND (exp
, 0);
1259 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1260 (as builtin stringops may alias with anything). */
1261 exp
= fold_build2 (MEM_REF
,
1262 build_array_type (char_type_node
,
1263 build_range_type (sizetype
,
1264 size_one_node
, len
)),
1265 exp
, build_int_cst (ptr_type_node
, 0));
1267 /* If the MEM_REF has no acceptable address, try to get the base object
1268 from the original address we got, and build an all-aliasing
1269 unknown-sized access to that one. */
1270 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1271 set_mem_attributes (mem
, exp
, 0);
1272 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1273 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1276 exp
= build_fold_addr_expr (exp
);
1277 exp
= fold_build2 (MEM_REF
,
1278 build_array_type (char_type_node
,
1279 build_range_type (sizetype
,
1282 exp
, build_int_cst (ptr_type_node
, 0));
1283 set_mem_attributes (mem
, exp
, 0);
1285 set_mem_alias_set (mem
, 0);
/* Built-in functions to perform an untyped call and return.  */

/* Per-target tables of the machine mode in which each hard register is
   saved/restored by the __builtin_apply machinery; VOIDmode marks
   registers that do not participate.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1296 /* Return the size required for the block returned by __builtin_apply_args,
1297 and initialize apply_args_mode. */
1300 apply_args_size (void)
1302 static int size
= -1;
1305 enum machine_mode mode
;
1307 /* The values computed by this function never change. */
1310 /* The first value is the incoming arg-pointer. */
1311 size
= GET_MODE_SIZE (Pmode
);
1313 /* The second value is the structure value address unless this is
1314 passed as an "invisible" first argument. */
1315 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1316 size
+= GET_MODE_SIZE (Pmode
);
1318 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1319 if (FUNCTION_ARG_REGNO_P (regno
))
1321 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1323 gcc_assert (mode
!= VOIDmode
);
1325 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1326 if (size
% align
!= 0)
1327 size
= CEIL (size
, align
) * align
;
1328 size
+= GET_MODE_SIZE (mode
);
1329 apply_args_mode
[regno
] = mode
;
1333 apply_args_mode
[regno
] = VOIDmode
;
1339 /* Return the size required for the block returned by __builtin_apply,
1340 and initialize apply_result_mode. */
1343 apply_result_size (void)
1345 static int size
= -1;
1347 enum machine_mode mode
;
1349 /* The values computed by this function never change. */
1354 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1355 if (targetm
.calls
.function_value_regno_p (regno
))
1357 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1359 gcc_assert (mode
!= VOIDmode
);
1361 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1362 if (size
% align
!= 0)
1363 size
= CEIL (size
, align
) * align
;
1364 size
+= GET_MODE_SIZE (mode
);
1365 apply_result_mode
[regno
] = mode
;
1368 apply_result_mode
[regno
] = VOIDmode
;
1370 /* Allow targets that use untyped_call and untyped_return to override
1371 the size so that machine-specific information can be stored here. */
1372 #ifdef APPLY_RESULT_SIZE
1373 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Lay out this register's slot, aligned for its mode.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1410 /* Save the state required to perform an untyped call with the same
1411 arguments as were passed to the current function. */
1414 expand_builtin_apply_args_1 (void)
1417 int size
, align
, regno
;
1418 enum machine_mode mode
;
1419 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1421 /* Create a block where the arg-pointer, structure value address,
1422 and argument registers can be saved. */
1423 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1425 /* Walk past the arg-pointer and structure value address. */
1426 size
= GET_MODE_SIZE (Pmode
);
1427 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1428 size
+= GET_MODE_SIZE (Pmode
);
1430 /* Save each register used in calling a function to the block. */
1431 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1432 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1434 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1435 if (size
% align
!= 0)
1436 size
= CEIL (size
, align
) * align
;
1438 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1440 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1441 size
+= GET_MODE_SIZE (mode
);
1444 /* Save the arg pointer to the block. */
1445 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1446 #ifdef STACK_GROWS_DOWNWARD
1447 /* We need the pointer as the caller actually passed them to us, not
1448 as we might have pretended they were passed. Make sure it's a valid
1449 operand, as emit_move_insn isn't expected to handle a PLUS. */
1451 = force_operand (plus_constant (Pmode
, tem
, crtl
->args
.pretend_args_size
),
1454 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1456 size
= GET_MODE_SIZE (Pmode
);
1458 /* Save the structure value address unless this is passed as an
1459 "invisible" first argument. */
1460 if (struct_incoming_value
)
1462 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1463 copy_to_reg (struct_incoming_value
));
1464 size
+= GET_MODE_SIZE (Pmode
);
1467 /* Return the address of the block. */
1468 return copy_addr_to_reg (XEXP (registers
, 0));
1471 /* __builtin_apply_args returns block of memory allocated on
1472 the stack into which is stored the arg pointer, structure
1473 value address, static chain, and all the registers that might
1474 possibly be used in performing a function call. The code is
1475 moved to the start of the function so the incoming values are
1479 expand_builtin_apply_args (void)
1481 /* Don't do __builtin_apply_args more than once in a function.
1482 Save the result of the first call and reuse it. */
1483 if (apply_args_value
!= 0)
1484 return apply_args_value
;
1486 /* When this function is called, it means that registers must be
1487 saved on entry to this function. So we migrate the
1488 call to the first insn of this function. */
1493 temp
= expand_builtin_apply_args_1 ();
1497 apply_args_value
= temp
;
1499 /* Put the insns after the NOTE that starts the function.
1500 If this is inside a start_sequence, make the outer-level insn
1501 chain current, so the code is placed at the start of the
1502 function. If internal_arg_pointer is a non-virtual pseudo,
1503 it needs to be placed after the function that initializes
1505 push_topmost_sequence ();
1506 if (REG_P (crtl
->args
.internal_arg_pointer
)
1507 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1508 emit_insn_before (seq
, parm_birth_insn
);
1510 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1511 pop_topmost_sequence ();
1516 /* Perform an untyped call and save the state required to perform an
1517 untyped return of whatever value was returned by the given function. */
1520 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1522 int size
, align
, regno
;
1523 enum machine_mode mode
;
1524 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1525 rtx old_stack_level
= 0;
1526 rtx call_fusage
= 0;
1527 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1529 arguments
= convert_memory_address (Pmode
, arguments
);
1531 /* Create a block where the return registers can be saved. */
1532 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1534 /* Fetch the arg pointer from the ARGUMENTS block. */
1535 incoming_args
= gen_reg_rtx (Pmode
);
1536 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1537 #ifndef STACK_GROWS_DOWNWARD
1538 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1539 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1542 /* Push a new argument block and copy the arguments. Do not allow
1543 the (potential) memcpy call below to interfere with our stack
1545 do_pending_stack_adjust ();
1548 /* Save the stack with nonlocal if available. */
1549 #ifdef HAVE_save_stack_nonlocal
1550 if (HAVE_save_stack_nonlocal
)
1551 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1554 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1556 /* Allocate a block of memory onto the stack and copy the memory
1557 arguments to the outgoing arguments address. We can pass TRUE
1558 as the 4th argument because we just saved the stack pointer
1559 and will restore it right after the call. */
1560 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1562 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1563 may have already set current_function_calls_alloca to true.
1564 current_function_calls_alloca won't be set if argsize is zero,
1565 so we have to guarantee need_drap is true here. */
1566 if (SUPPORTS_STACK_ALIGNMENT
)
1567 crtl
->need_drap
= true;
1569 dest
= virtual_outgoing_args_rtx
;
1570 #ifndef STACK_GROWS_DOWNWARD
1571 if (CONST_INT_P (argsize
))
1572 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1574 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1576 dest
= gen_rtx_MEM (BLKmode
, dest
);
1577 set_mem_align (dest
, PARM_BOUNDARY
);
1578 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1579 set_mem_align (src
, PARM_BOUNDARY
);
1580 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1582 /* Refer to the argument block. */
1584 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1585 set_mem_align (arguments
, PARM_BOUNDARY
);
1587 /* Walk past the arg-pointer and structure value address. */
1588 size
= GET_MODE_SIZE (Pmode
);
1590 size
+= GET_MODE_SIZE (Pmode
);
1592 /* Restore each of the registers previously saved. Make USE insns
1593 for each of these registers for use in making the call. */
1594 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1595 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1597 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1598 if (size
% align
!= 0)
1599 size
= CEIL (size
, align
) * align
;
1600 reg
= gen_rtx_REG (mode
, regno
);
1601 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1602 use_reg (&call_fusage
, reg
);
1603 size
+= GET_MODE_SIZE (mode
);
1606 /* Restore the structure value address unless this is passed as an
1607 "invisible" first argument. */
1608 size
= GET_MODE_SIZE (Pmode
);
1611 rtx value
= gen_reg_rtx (Pmode
);
1612 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1613 emit_move_insn (struct_value
, value
);
1614 if (REG_P (struct_value
))
1615 use_reg (&call_fusage
, struct_value
);
1616 size
+= GET_MODE_SIZE (Pmode
);
1619 /* All arguments and registers used for the call are set up by now! */
1620 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1622 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1623 and we don't want to load it into a register as an optimization,
1624 because prepare_call_address already did it if it should be done. */
1625 if (GET_CODE (function
) != SYMBOL_REF
)
1626 function
= memory_address (FUNCTION_MODE
, function
);
1628 /* Generate the actual call instruction and save the return value. */
1629 #ifdef HAVE_untyped_call
1630 if (HAVE_untyped_call
)
1631 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1632 result
, result_vector (1, result
)));
1635 #ifdef HAVE_call_value
1636 if (HAVE_call_value
)
1640 /* Locate the unique return register. It is not possible to
1641 express a call that sets more than one return register using
1642 call_value; use untyped_call for that. In fact, untyped_call
1643 only needs to save the return registers in the given block. */
1644 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1645 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1647 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1649 valreg
= gen_rtx_REG (mode
, regno
);
1652 emit_call_insn (GEN_CALL_VALUE (valreg
,
1653 gen_rtx_MEM (FUNCTION_MODE
, function
),
1654 const0_rtx
, NULL_RTX
, const0_rtx
));
1656 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1662 /* Find the CALL insn we just emitted, and attach the register usage
1664 call_insn
= last_call_insn ();
1665 add_function_usage_to (call_insn
, call_fusage
);
1667 /* Restore the stack. */
1668 #ifdef HAVE_save_stack_nonlocal
1669 if (HAVE_save_stack_nonlocal
)
1670 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1673 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1674 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1678 /* Return the address of the result block. */
1679 result
= copy_addr_to_reg (XEXP (result
, 0));
1680 return convert_memory_address (ptr_mode
, result
);
1683 /* Perform an untyped return. */
1686 expand_builtin_return (rtx result
)
1688 int size
, align
, regno
;
1689 enum machine_mode mode
;
1691 rtx call_fusage
= 0;
1693 result
= convert_memory_address (Pmode
, result
);
1695 apply_result_size ();
1696 result
= gen_rtx_MEM (BLKmode
, result
);
1698 #ifdef HAVE_untyped_return
1699 if (HAVE_untyped_return
)
1701 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1707 /* Restore the return value and note that each value is used. */
1709 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1710 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1712 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1713 if (size
% align
!= 0)
1714 size
= CEIL (size
, align
) * align
;
1715 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1716 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1718 push_to_sequence (call_fusage
);
1720 call_fusage
= get_insns ();
1722 size
+= GET_MODE_SIZE (mode
);
1725 /* Put the USE insns before the return. */
1726 emit_insn (call_fusage
);
1728 /* Return whatever values was restored by jumping directly to the end
1730 expand_naked_return ();
1733 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1735 static enum type_class
1736 type_to_class (tree type
)
1738 switch (TREE_CODE (type
))
1740 case VOID_TYPE
: return void_type_class
;
1741 case INTEGER_TYPE
: return integer_type_class
;
1742 case ENUMERAL_TYPE
: return enumeral_type_class
;
1743 case BOOLEAN_TYPE
: return boolean_type_class
;
1744 case POINTER_TYPE
: return pointer_type_class
;
1745 case REFERENCE_TYPE
: return reference_type_class
;
1746 case OFFSET_TYPE
: return offset_type_class
;
1747 case REAL_TYPE
: return real_type_class
;
1748 case COMPLEX_TYPE
: return complex_type_class
;
1749 case FUNCTION_TYPE
: return function_type_class
;
1750 case METHOD_TYPE
: return method_type_class
;
1751 case RECORD_TYPE
: return record_type_class
;
1753 case QUAL_UNION_TYPE
: return union_type_class
;
1754 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1755 ? string_type_class
: array_type_class
);
1756 case LANG_TYPE
: return lang_type_class
;
1757 default: return no_type_class
;
1761 /* Expand a call EXP to __builtin_classify_type. */
1764 expand_builtin_classify_type (tree exp
)
1766 if (call_expr_nargs (exp
))
1767 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1768 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1785 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1786 if available. If IMPLICIT is true use the implicit builtin declaration,
1787 otherwise use the explicit declaration. If we can't do the conversion,
1791 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1793 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1797 CASE_MATHFN (BUILT_IN_ACOS
)
1798 CASE_MATHFN (BUILT_IN_ACOSH
)
1799 CASE_MATHFN (BUILT_IN_ASIN
)
1800 CASE_MATHFN (BUILT_IN_ASINH
)
1801 CASE_MATHFN (BUILT_IN_ATAN
)
1802 CASE_MATHFN (BUILT_IN_ATAN2
)
1803 CASE_MATHFN (BUILT_IN_ATANH
)
1804 CASE_MATHFN (BUILT_IN_CBRT
)
1805 CASE_MATHFN (BUILT_IN_CEIL
)
1806 CASE_MATHFN (BUILT_IN_CEXPI
)
1807 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1808 CASE_MATHFN (BUILT_IN_COS
)
1809 CASE_MATHFN (BUILT_IN_COSH
)
1810 CASE_MATHFN (BUILT_IN_DREM
)
1811 CASE_MATHFN (BUILT_IN_ERF
)
1812 CASE_MATHFN (BUILT_IN_ERFC
)
1813 CASE_MATHFN (BUILT_IN_EXP
)
1814 CASE_MATHFN (BUILT_IN_EXP10
)
1815 CASE_MATHFN (BUILT_IN_EXP2
)
1816 CASE_MATHFN (BUILT_IN_EXPM1
)
1817 CASE_MATHFN (BUILT_IN_FABS
)
1818 CASE_MATHFN (BUILT_IN_FDIM
)
1819 CASE_MATHFN (BUILT_IN_FLOOR
)
1820 CASE_MATHFN (BUILT_IN_FMA
)
1821 CASE_MATHFN (BUILT_IN_FMAX
)
1822 CASE_MATHFN (BUILT_IN_FMIN
)
1823 CASE_MATHFN (BUILT_IN_FMOD
)
1824 CASE_MATHFN (BUILT_IN_FREXP
)
1825 CASE_MATHFN (BUILT_IN_GAMMA
)
1826 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1827 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1828 CASE_MATHFN (BUILT_IN_HYPOT
)
1829 CASE_MATHFN (BUILT_IN_ILOGB
)
1830 CASE_MATHFN (BUILT_IN_ICEIL
)
1831 CASE_MATHFN (BUILT_IN_IFLOOR
)
1832 CASE_MATHFN (BUILT_IN_INF
)
1833 CASE_MATHFN (BUILT_IN_IRINT
)
1834 CASE_MATHFN (BUILT_IN_IROUND
)
1835 CASE_MATHFN (BUILT_IN_ISINF
)
1836 CASE_MATHFN (BUILT_IN_J0
)
1837 CASE_MATHFN (BUILT_IN_J1
)
1838 CASE_MATHFN (BUILT_IN_JN
)
1839 CASE_MATHFN (BUILT_IN_LCEIL
)
1840 CASE_MATHFN (BUILT_IN_LDEXP
)
1841 CASE_MATHFN (BUILT_IN_LFLOOR
)
1842 CASE_MATHFN (BUILT_IN_LGAMMA
)
1843 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1844 CASE_MATHFN (BUILT_IN_LLCEIL
)
1845 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1846 CASE_MATHFN (BUILT_IN_LLRINT
)
1847 CASE_MATHFN (BUILT_IN_LLROUND
)
1848 CASE_MATHFN (BUILT_IN_LOG
)
1849 CASE_MATHFN (BUILT_IN_LOG10
)
1850 CASE_MATHFN (BUILT_IN_LOG1P
)
1851 CASE_MATHFN (BUILT_IN_LOG2
)
1852 CASE_MATHFN (BUILT_IN_LOGB
)
1853 CASE_MATHFN (BUILT_IN_LRINT
)
1854 CASE_MATHFN (BUILT_IN_LROUND
)
1855 CASE_MATHFN (BUILT_IN_MODF
)
1856 CASE_MATHFN (BUILT_IN_NAN
)
1857 CASE_MATHFN (BUILT_IN_NANS
)
1858 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1859 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1860 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1861 CASE_MATHFN (BUILT_IN_POW
)
1862 CASE_MATHFN (BUILT_IN_POWI
)
1863 CASE_MATHFN (BUILT_IN_POW10
)
1864 CASE_MATHFN (BUILT_IN_REMAINDER
)
1865 CASE_MATHFN (BUILT_IN_REMQUO
)
1866 CASE_MATHFN (BUILT_IN_RINT
)
1867 CASE_MATHFN (BUILT_IN_ROUND
)
1868 CASE_MATHFN (BUILT_IN_SCALB
)
1869 CASE_MATHFN (BUILT_IN_SCALBLN
)
1870 CASE_MATHFN (BUILT_IN_SCALBN
)
1871 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1872 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1873 CASE_MATHFN (BUILT_IN_SIN
)
1874 CASE_MATHFN (BUILT_IN_SINCOS
)
1875 CASE_MATHFN (BUILT_IN_SINH
)
1876 CASE_MATHFN (BUILT_IN_SQRT
)
1877 CASE_MATHFN (BUILT_IN_TAN
)
1878 CASE_MATHFN (BUILT_IN_TANH
)
1879 CASE_MATHFN (BUILT_IN_TGAMMA
)
1880 CASE_MATHFN (BUILT_IN_TRUNC
)
1881 CASE_MATHFN (BUILT_IN_Y0
)
1882 CASE_MATHFN (BUILT_IN_Y1
)
1883 CASE_MATHFN (BUILT_IN_YN
)
1889 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1891 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1893 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1898 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1901 return builtin_decl_explicit (fcode2
);
1904 /* Like mathfn_built_in_1(), but always use the implicit array. */
1907 mathfn_built_in (tree type
, enum built_in_function fn
)
1909 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1912 /* If errno must be maintained, expand the RTL to check if the result,
1913 TARGET, of a built-in function call, EXP, is NaN, and if so set
1917 expand_errno_check (tree exp
, rtx target
)
1919 rtx lab
= gen_label_rtx ();
1921 /* Test the result; if it is NaN, set errno=EDOM because
1922 the argument was not in the domain. */
1923 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1924 NULL_RTX
, NULL_RTX
, lab
,
1925 /* The jump is very likely. */
1926 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1929 /* If this built-in doesn't throw an exception, set errno directly. */
1930 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1932 #ifdef GEN_ERRNO_RTX
1933 rtx errno_rtx
= GEN_ERRNO_RTX
;
1936 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1938 emit_move_insn (errno_rtx
,
1939 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
1945 /* Make sure the library call isn't expanded as a tail call. */
1946 CALL_EXPR_TAILCALL (exp
) = 0;
1948 /* We can't set errno=EDOM directly; let the library call do it.
1949 Pop the arguments right away in case the call gets deleted. */
1951 expand_call (exp
, target
, 0);
1956 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1957 Return NULL_RTX if a normal call should be emitted rather than expanding
1958 the function in-line. EXP is the expression that is a call to the builtin
1959 function; if convenient, the result should be placed in TARGET.
1960 SUBTARGET may be used as the target for computing one of EXP's operands. */
1963 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1965 optab builtin_optab
;
1967 tree fndecl
= get_callee_fndecl (exp
);
1968 enum machine_mode mode
;
1969 bool errno_set
= false;
1970 bool try_widening
= false;
1973 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1976 arg
= CALL_EXPR_ARG (exp
, 0);
1978 switch (DECL_FUNCTION_CODE (fndecl
))
1980 CASE_FLT_FN (BUILT_IN_SQRT
):
1981 errno_set
= ! tree_expr_nonnegative_p (arg
);
1982 try_widening
= true;
1983 builtin_optab
= sqrt_optab
;
1985 CASE_FLT_FN (BUILT_IN_EXP
):
1986 errno_set
= true; builtin_optab
= exp_optab
; break;
1987 CASE_FLT_FN (BUILT_IN_EXP10
):
1988 CASE_FLT_FN (BUILT_IN_POW10
):
1989 errno_set
= true; builtin_optab
= exp10_optab
; break;
1990 CASE_FLT_FN (BUILT_IN_EXP2
):
1991 errno_set
= true; builtin_optab
= exp2_optab
; break;
1992 CASE_FLT_FN (BUILT_IN_EXPM1
):
1993 errno_set
= true; builtin_optab
= expm1_optab
; break;
1994 CASE_FLT_FN (BUILT_IN_LOGB
):
1995 errno_set
= true; builtin_optab
= logb_optab
; break;
1996 CASE_FLT_FN (BUILT_IN_LOG
):
1997 errno_set
= true; builtin_optab
= log_optab
; break;
1998 CASE_FLT_FN (BUILT_IN_LOG10
):
1999 errno_set
= true; builtin_optab
= log10_optab
; break;
2000 CASE_FLT_FN (BUILT_IN_LOG2
):
2001 errno_set
= true; builtin_optab
= log2_optab
; break;
2002 CASE_FLT_FN (BUILT_IN_LOG1P
):
2003 errno_set
= true; builtin_optab
= log1p_optab
; break;
2004 CASE_FLT_FN (BUILT_IN_ASIN
):
2005 builtin_optab
= asin_optab
; break;
2006 CASE_FLT_FN (BUILT_IN_ACOS
):
2007 builtin_optab
= acos_optab
; break;
2008 CASE_FLT_FN (BUILT_IN_TAN
):
2009 builtin_optab
= tan_optab
; break;
2010 CASE_FLT_FN (BUILT_IN_ATAN
):
2011 builtin_optab
= atan_optab
; break;
2012 CASE_FLT_FN (BUILT_IN_FLOOR
):
2013 builtin_optab
= floor_optab
; break;
2014 CASE_FLT_FN (BUILT_IN_CEIL
):
2015 builtin_optab
= ceil_optab
; break;
2016 CASE_FLT_FN (BUILT_IN_TRUNC
):
2017 builtin_optab
= btrunc_optab
; break;
2018 CASE_FLT_FN (BUILT_IN_ROUND
):
2019 builtin_optab
= round_optab
; break;
2020 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2021 builtin_optab
= nearbyint_optab
;
2022 if (flag_trapping_math
)
2024 /* Else fallthrough and expand as rint. */
2025 CASE_FLT_FN (BUILT_IN_RINT
):
2026 builtin_optab
= rint_optab
; break;
2027 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2028 builtin_optab
= significand_optab
; break;
2033 /* Make a suitable register to place result in. */
2034 mode
= TYPE_MODE (TREE_TYPE (exp
));
2036 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2039 /* Before working hard, check whether the instruction is available, but try
2040 to widen the mode for specific operations. */
2041 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2042 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2043 && (!errno_set
|| !optimize_insn_for_size_p ()))
2045 rtx result
= gen_reg_rtx (mode
);
2047 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2048 need to expand the argument again. This way, we will not perform
2049 side-effects more the once. */
2050 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2052 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2056 /* Compute into RESULT.
2057 Set RESULT to wherever the result comes back. */
2058 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2063 expand_errno_check (exp
, result
);
2065 /* Output the entire sequence. */
2066 insns
= get_insns ();
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2078 return expand_call (exp
, target
, target
== const0_rtx
);
2081 /* Expand a call to the builtin binary math functions (pow and atan2).
2082 Return NULL_RTX if a normal call should be emitted rather than expanding the
2083 function in-line. EXP is the expression that is a call to the builtin
2084 function; if convenient, the result should be placed in TARGET.
2085 SUBTARGET may be used as the target for computing one of EXP's
2089 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2091 optab builtin_optab
;
2092 rtx op0
, op1
, insns
, result
;
2093 int op1_type
= REAL_TYPE
;
2094 tree fndecl
= get_callee_fndecl (exp
);
2096 enum machine_mode mode
;
2097 bool errno_set
= true;
2099 switch (DECL_FUNCTION_CODE (fndecl
))
2101 CASE_FLT_FN (BUILT_IN_SCALBN
):
2102 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2103 CASE_FLT_FN (BUILT_IN_LDEXP
):
2104 op1_type
= INTEGER_TYPE
;
2109 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2112 arg0
= CALL_EXPR_ARG (exp
, 0);
2113 arg1
= CALL_EXPR_ARG (exp
, 1);
2115 switch (DECL_FUNCTION_CODE (fndecl
))
2117 CASE_FLT_FN (BUILT_IN_POW
):
2118 builtin_optab
= pow_optab
; break;
2119 CASE_FLT_FN (BUILT_IN_ATAN2
):
2120 builtin_optab
= atan2_optab
; break;
2121 CASE_FLT_FN (BUILT_IN_SCALB
):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2124 builtin_optab
= scalb_optab
; break;
2125 CASE_FLT_FN (BUILT_IN_SCALBN
):
2126 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2127 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2129 /* Fall through... */
2130 CASE_FLT_FN (BUILT_IN_LDEXP
):
2131 builtin_optab
= ldexp_optab
; break;
2132 CASE_FLT_FN (BUILT_IN_FMOD
):
2133 builtin_optab
= fmod_optab
; break;
2134 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2135 CASE_FLT_FN (BUILT_IN_DREM
):
2136 builtin_optab
= remainder_optab
; break;
2141 /* Make a suitable register to place result in. */
2142 mode
= TYPE_MODE (TREE_TYPE (exp
));
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2148 result
= gen_reg_rtx (mode
);
2150 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2153 if (errno_set
&& optimize_insn_for_size_p ())
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2158 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2160 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2161 op1
= expand_normal (arg1
);
2165 /* Compute into RESULT.
2166 Set RESULT to wherever the result comes back. */
2167 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2168 result
, 0, OPTAB_DIRECT
);
2170 /* If we were unable to expand via the builtin, stop the sequence
2171 (without outputting the insns) and call to the library function
2172 with the stabilized argument list. */
2176 return expand_call (exp
, target
, target
== const0_rtx
);
2180 expand_errno_check (exp
, result
);
2182 /* Output the entire sequence. */
2183 insns
= get_insns ();
2190 /* Expand a call to the builtin trinary math functions (fma).
2191 Return NULL_RTX if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's
2198 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2200 optab builtin_optab
;
2201 rtx op0
, op1
, op2
, insns
, result
;
2202 tree fndecl
= get_callee_fndecl (exp
);
2203 tree arg0
, arg1
, arg2
;
2204 enum machine_mode mode
;
2206 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2209 arg0
= CALL_EXPR_ARG (exp
, 0);
2210 arg1
= CALL_EXPR_ARG (exp
, 1);
2211 arg2
= CALL_EXPR_ARG (exp
, 2);
2213 switch (DECL_FUNCTION_CODE (fndecl
))
2215 CASE_FLT_FN (BUILT_IN_FMA
):
2216 builtin_optab
= fma_optab
; break;
2221 /* Make a suitable register to place result in. */
2222 mode
= TYPE_MODE (TREE_TYPE (exp
));
2224 /* Before working hard, check whether the instruction is available. */
2225 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2228 result
= gen_reg_rtx (mode
);
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2232 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2233 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2235 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2236 op1
= expand_normal (arg1
);
2237 op2
= expand_normal (arg2
);
2241 /* Compute into RESULT.
2242 Set RESULT to wherever the result comes back. */
2243 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call to the library function
2248 with the stabilized argument list. */
2252 return expand_call (exp
, target
, target
== const0_rtx
);
2255 /* Output the entire sequence. */
2256 insns
= get_insns ();
2263 /* Expand a call to the builtin sin and cos math functions.
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2271 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2273 optab builtin_optab
;
2275 tree fndecl
= get_callee_fndecl (exp
);
2276 enum machine_mode mode
;
2279 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2282 arg
= CALL_EXPR_ARG (exp
, 0);
2284 switch (DECL_FUNCTION_CODE (fndecl
))
2286 CASE_FLT_FN (BUILT_IN_SIN
):
2287 CASE_FLT_FN (BUILT_IN_COS
):
2288 builtin_optab
= sincos_optab
; break;
2293 /* Make a suitable register to place result in. */
2294 mode
= TYPE_MODE (TREE_TYPE (exp
));
2296 /* Check if sincos insn is available, otherwise fallback
2297 to sin or cos insn. */
2298 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2299 switch (DECL_FUNCTION_CODE (fndecl
))
2301 CASE_FLT_FN (BUILT_IN_SIN
):
2302 builtin_optab
= sin_optab
; break;
2303 CASE_FLT_FN (BUILT_IN_COS
):
2304 builtin_optab
= cos_optab
; break;
2309 /* Before working hard, check whether the instruction is available. */
2310 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2312 rtx result
= gen_reg_rtx (mode
);
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2319 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2323 /* Compute into RESULT.
2324 Set RESULT to wherever the result comes back. */
2325 if (builtin_optab
== sincos_optab
)
2329 switch (DECL_FUNCTION_CODE (fndecl
))
2331 CASE_FLT_FN (BUILT_IN_SIN
):
2332 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2334 CASE_FLT_FN (BUILT_IN_COS
):
2335 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2343 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2347 /* Output the entire sequence. */
2348 insns
= get_insns ();
2354 /* If we were unable to expand via the builtin, stop the sequence
2355 (without outputting the insns) and call to the library function
2356 with the stabilized argument list. */
2360 return expand_call (exp
, target
, target
== const0_rtx
);
2363 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2364 return an RTL instruction code that implements the functionality.
2365 If that isn't possible or available return CODE_FOR_nothing. */
2367 static enum insn_code
2368 interclass_mathfn_icode (tree arg
, tree fndecl
)
2370 bool errno_set
= false;
2371 optab builtin_optab
= unknown_optab
;
2372 enum machine_mode mode
;
2374 switch (DECL_FUNCTION_CODE (fndecl
))
2376 CASE_FLT_FN (BUILT_IN_ILOGB
):
2377 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2378 CASE_FLT_FN (BUILT_IN_ISINF
):
2379 builtin_optab
= isinf_optab
; break;
2380 case BUILT_IN_ISNORMAL
:
2381 case BUILT_IN_ISFINITE
:
2382 CASE_FLT_FN (BUILT_IN_FINITE
):
2383 case BUILT_IN_FINITED32
:
2384 case BUILT_IN_FINITED64
:
2385 case BUILT_IN_FINITED128
:
2386 case BUILT_IN_ISINFD32
:
2387 case BUILT_IN_ISINFD64
:
2388 case BUILT_IN_ISINFD128
:
2389 /* These builtins have no optabs (yet). */
2395 /* There's no easy way to detect the case we need to set EDOM. */
2396 if (flag_errno_math
&& errno_set
)
2397 return CODE_FOR_nothing
;
2399 /* Optab mode depends on the mode of the input argument. */
2400 mode
= TYPE_MODE (TREE_TYPE (arg
));
2403 return optab_handler (builtin_optab
, mode
);
2404 return CODE_FOR_nothing
;
2407 /* Expand a call to one of the builtin math functions that operate on
2408 floating point argument and output an integer result (ilogb, isinf,
2410 Return 0 if a normal call should be emitted rather than expanding the
2411 function in-line. EXP is the expression that is a call to the builtin
2412 function; if convenient, the result should be placed in TARGET. */
2415 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2417 enum insn_code icode
= CODE_FOR_nothing
;
2419 tree fndecl
= get_callee_fndecl (exp
);
2420 enum machine_mode mode
;
2423 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2426 arg
= CALL_EXPR_ARG (exp
, 0);
2427 icode
= interclass_mathfn_icode (arg
, fndecl
);
2428 mode
= TYPE_MODE (TREE_TYPE (arg
));
2430 if (icode
!= CODE_FOR_nothing
)
2432 struct expand_operand ops
[1];
2433 rtx last
= get_last_insn ();
2434 tree orig_arg
= arg
;
2436 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2437 need to expand the argument again. This way, we will not perform
2438 side-effects more the once. */
2439 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2441 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2443 if (mode
!= GET_MODE (op0
))
2444 op0
= convert_to_mode (mode
, op0
, 0);
2446 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2447 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2448 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2449 return ops
[0].value
;
2451 delete_insns_since (last
);
2452 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2458 /* Expand a call to the builtin sincos math function.
2459 Return NULL_RTX if a normal call should be emitted rather than expanding the
2460 function in-line. EXP is the expression that is a call to the builtin
2464 expand_builtin_sincos (tree exp
)
2466 rtx op0
, op1
, op2
, target1
, target2
;
2467 enum machine_mode mode
;
2468 tree arg
, sinp
, cosp
;
2470 location_t loc
= EXPR_LOCATION (exp
);
2471 tree alias_type
, alias_off
;
2473 if (!validate_arglist (exp
, REAL_TYPE
,
2474 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2477 arg
= CALL_EXPR_ARG (exp
, 0);
2478 sinp
= CALL_EXPR_ARG (exp
, 1);
2479 cosp
= CALL_EXPR_ARG (exp
, 2);
2481 /* Make a suitable register to place result in. */
2482 mode
= TYPE_MODE (TREE_TYPE (arg
));
2484 /* Check if sincos insn is available, otherwise emit the call. */
2485 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2488 target1
= gen_reg_rtx (mode
);
2489 target2
= gen_reg_rtx (mode
);
2491 op0
= expand_normal (arg
);
2492 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2493 alias_off
= build_int_cst (alias_type
, 0);
2494 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2496 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2499 /* Compute into target1 and target2.
2500 Set TARGET to wherever the result comes back. */
2501 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2502 gcc_assert (result
);
2504 /* Move target1 and target2 to the memory locations indicated
2506 emit_move_insn (op1
, target1
);
2507 emit_move_insn (op2
, target2
);
2512 /* Expand a call to the internal cexpi builtin to the sincos math function.
2513 EXP is the expression that is a call to the builtin function; if convenient,
2514 the result should be placed in TARGET. */
2517 expand_builtin_cexpi (tree exp
, rtx target
)
2519 tree fndecl
= get_callee_fndecl (exp
);
2521 enum machine_mode mode
;
2523 location_t loc
= EXPR_LOCATION (exp
);
2525 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2528 arg
= CALL_EXPR_ARG (exp
, 0);
2529 type
= TREE_TYPE (arg
);
2530 mode
= TYPE_MODE (TREE_TYPE (arg
));
2532 /* Try expanding via a sincos optab, fall back to emitting a libcall
2533 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2534 is only generated from sincos, cexp or if we have either of them. */
2535 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2537 op1
= gen_reg_rtx (mode
);
2538 op2
= gen_reg_rtx (mode
);
2540 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2542 /* Compute into op1 and op2. */
2543 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2545 else if (targetm
.libc_has_function (function_sincos
))
2547 tree call
, fn
= NULL_TREE
;
2551 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2552 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2553 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2554 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2555 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2556 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2560 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2561 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2562 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2563 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2564 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2565 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2567 /* Make sure not to fold the sincos call again. */
2568 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2569 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2570 call
, 3, arg
, top1
, top2
));
2574 tree call
, fn
= NULL_TREE
, narg
;
2575 tree ctype
= build_complex_type (type
);
2577 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2578 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2579 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2580 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2581 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2582 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2586 /* If we don't have a decl for cexp create one. This is the
2587 friendliest fallback if the user calls __builtin_cexpi
2588 without full target C99 function support. */
2589 if (fn
== NULL_TREE
)
2592 const char *name
= NULL
;
2594 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2596 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2598 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2601 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2602 fn
= build_fn_decl (name
, fntype
);
2605 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2606 build_real (type
, dconst0
), arg
);
2608 /* Make sure not to fold the cexp call again. */
2609 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2610 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2611 target
, VOIDmode
, EXPAND_NORMAL
);
2614 /* Now build the proper return type. */
2615 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2616 make_tree (TREE_TYPE (arg
), op2
),
2617 make_tree (TREE_TYPE (arg
), op1
)),
2618 target
, VOIDmode
, EXPAND_NORMAL
);
2621 /* Conveniently construct a function call expression. FNDECL names the
2622 function to be called, N is the number of arguments, and the "..."
2623 parameters are the argument expressions. Unlike build_call_exr
2624 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2627 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2630 tree fntype
= TREE_TYPE (fndecl
);
2631 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2634 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2636 SET_EXPR_LOCATION (fn
, loc
);
2640 /* Expand a call to one of the builtin rounding functions gcc defines
2641 as an extension (lfloor and lceil). As these are gcc extensions we
2642 do not need to worry about setting errno to EDOM.
2643 If expanding via optab fails, lower expression to (int)(floor(x)).
2644 EXP is the expression that is a call to the builtin function;
2645 if convenient, the result should be placed in TARGET. */
2648 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2650 convert_optab builtin_optab
;
2651 rtx op0
, insns
, tmp
;
2652 tree fndecl
= get_callee_fndecl (exp
);
2653 enum built_in_function fallback_fn
;
2654 tree fallback_fndecl
;
2655 enum machine_mode mode
;
2658 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2661 arg
= CALL_EXPR_ARG (exp
, 0);
2663 switch (DECL_FUNCTION_CODE (fndecl
))
2665 CASE_FLT_FN (BUILT_IN_ICEIL
):
2666 CASE_FLT_FN (BUILT_IN_LCEIL
):
2667 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2668 builtin_optab
= lceil_optab
;
2669 fallback_fn
= BUILT_IN_CEIL
;
2672 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2673 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2674 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2675 builtin_optab
= lfloor_optab
;
2676 fallback_fn
= BUILT_IN_FLOOR
;
2683 /* Make a suitable register to place result in. */
2684 mode
= TYPE_MODE (TREE_TYPE (exp
));
2686 target
= gen_reg_rtx (mode
);
2688 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2689 need to expand the argument again. This way, we will not perform
2690 side-effects more the once. */
2691 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2693 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2697 /* Compute into TARGET. */
2698 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2700 /* Output the entire sequence. */
2701 insns
= get_insns ();
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns). */
2711 /* Fall back to floating point rounding optab. */
2712 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2714 /* For non-C99 targets we may end up without a fallback fndecl here
2715 if the user called __builtin_lfloor directly. In this case emit
2716 a call to the floor/ceil variants nevertheless. This should result
2717 in the best user experience for not full C99 targets. */
2718 if (fallback_fndecl
== NULL_TREE
)
2721 const char *name
= NULL
;
2723 switch (DECL_FUNCTION_CODE (fndecl
))
2725 case BUILT_IN_ICEIL
:
2726 case BUILT_IN_LCEIL
:
2727 case BUILT_IN_LLCEIL
:
2730 case BUILT_IN_ICEILF
:
2731 case BUILT_IN_LCEILF
:
2732 case BUILT_IN_LLCEILF
:
2735 case BUILT_IN_ICEILL
:
2736 case BUILT_IN_LCEILL
:
2737 case BUILT_IN_LLCEILL
:
2740 case BUILT_IN_IFLOOR
:
2741 case BUILT_IN_LFLOOR
:
2742 case BUILT_IN_LLFLOOR
:
2745 case BUILT_IN_IFLOORF
:
2746 case BUILT_IN_LFLOORF
:
2747 case BUILT_IN_LLFLOORF
:
2750 case BUILT_IN_IFLOORL
:
2751 case BUILT_IN_LFLOORL
:
2752 case BUILT_IN_LLFLOORL
:
2759 fntype
= build_function_type_list (TREE_TYPE (arg
),
2760 TREE_TYPE (arg
), NULL_TREE
);
2761 fallback_fndecl
= build_fn_decl (name
, fntype
);
2764 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2766 tmp
= expand_normal (exp
);
2767 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2769 /* Truncate the result of floating point optab to integer
2770 via expand_fix (). */
2771 target
= gen_reg_rtx (mode
);
2772 expand_fix (target
, tmp
, 0);
2777 /* Expand a call to one of the builtin math functions doing integer
2779 Return 0 if a normal call should be emitted rather than expanding the
2780 function in-line. EXP is the expression that is a call to the builtin
2781 function; if convenient, the result should be placed in TARGET. */
2784 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2786 convert_optab builtin_optab
;
2788 tree fndecl
= get_callee_fndecl (exp
);
2790 enum machine_mode mode
;
2791 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2793 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2796 arg
= CALL_EXPR_ARG (exp
, 0);
2798 switch (DECL_FUNCTION_CODE (fndecl
))
2800 CASE_FLT_FN (BUILT_IN_IRINT
):
2801 fallback_fn
= BUILT_IN_LRINT
;
2803 CASE_FLT_FN (BUILT_IN_LRINT
):
2804 CASE_FLT_FN (BUILT_IN_LLRINT
):
2805 builtin_optab
= lrint_optab
;
2808 CASE_FLT_FN (BUILT_IN_IROUND
):
2809 fallback_fn
= BUILT_IN_LROUND
;
2811 CASE_FLT_FN (BUILT_IN_LROUND
):
2812 CASE_FLT_FN (BUILT_IN_LLROUND
):
2813 builtin_optab
= lround_optab
;
2820 /* There's no easy way to detect the case we need to set EDOM. */
2821 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2824 /* Make a suitable register to place result in. */
2825 mode
= TYPE_MODE (TREE_TYPE (exp
));
2827 /* There's no easy way to detect the case we need to set EDOM. */
2828 if (!flag_errno_math
)
2830 rtx result
= gen_reg_rtx (mode
);
2832 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2833 need to expand the argument again. This way, we will not perform
2834 side-effects more the once. */
2835 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2837 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2841 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2843 /* Output the entire sequence. */
2844 insns
= get_insns ();
2850 /* If we were unable to expand via the builtin, stop the sequence
2851 (without outputting the insns) and call to the library function
2852 with the stabilized argument list. */
2856 if (fallback_fn
!= BUILT_IN_NONE
)
2858 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2859 targets, (int) round (x) should never be transformed into
2860 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2861 a call to lround in the hope that the target provides at least some
2862 C99 functions. This should result in the best user experience for
2863 not full C99 targets. */
2864 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2867 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2868 fallback_fndecl
, 1, arg
);
2870 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2871 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2872 return convert_to_mode (mode
, target
, 0);
2875 return expand_call (exp
, target
, target
== const0_rtx
);
2878 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2879 a normal call should be emitted rather than expanding the function
2880 in-line. EXP is the expression that is a call to the builtin
2881 function; if convenient, the result should be placed in TARGET. */
2884 expand_builtin_powi (tree exp
, rtx target
)
2888 enum machine_mode mode
;
2889 enum machine_mode mode2
;
2891 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2894 arg0
= CALL_EXPR_ARG (exp
, 0);
2895 arg1
= CALL_EXPR_ARG (exp
, 1);
2896 mode
= TYPE_MODE (TREE_TYPE (exp
));
2898 /* Emit a libcall to libgcc. */
2900 /* Mode of the 2nd argument must match that of an int. */
2901 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2903 if (target
== NULL_RTX
)
2904 target
= gen_reg_rtx (mode
);
2906 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2907 if (GET_MODE (op0
) != mode
)
2908 op0
= convert_to_mode (mode
, op0
, 0);
2909 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2910 if (GET_MODE (op1
) != mode2
)
2911 op1
= convert_to_mode (mode2
, op1
, 0);
2913 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2914 target
, LCT_CONST
, mode
, 2,
2915 op0
, mode
, op1
, mode2
);
2920 /* Expand expression EXP which is a call to the strlen builtin. Return
2921 NULL_RTX if we failed the caller should emit a normal call, otherwise
2922 try to get the result in TARGET, if convenient. */
2925 expand_builtin_strlen (tree exp
, rtx target
,
2926 enum machine_mode target_mode
)
2928 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2932 struct expand_operand ops
[4];
2935 tree src
= CALL_EXPR_ARG (exp
, 0);
2936 rtx src_reg
, before_strlen
;
2937 enum machine_mode insn_mode
= target_mode
;
2938 enum insn_code icode
= CODE_FOR_nothing
;
2941 /* If the length can be computed at compile-time, return it. */
2942 len
= c_strlen (src
, 0);
2944 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2946 /* If the length can be computed at compile-time and is constant
2947 integer, but there are side-effects in src, evaluate
2948 src for side-effects, then return len.
2949 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2950 can be optimized into: i++; x = 3; */
2951 len
= c_strlen (src
, 1);
2952 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2954 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2955 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2958 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2960 /* If SRC is not a pointer type, don't do this operation inline. */
2964 /* Bail out if we can't compute strlen in the right mode. */
2965 while (insn_mode
!= VOIDmode
)
2967 icode
= optab_handler (strlen_optab
, insn_mode
);
2968 if (icode
!= CODE_FOR_nothing
)
2971 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2973 if (insn_mode
== VOIDmode
)
2976 /* Make a place to hold the source address. We will not expand
2977 the actual source until we are sure that the expansion will
2978 not fail -- there are trees that cannot be expanded twice. */
2979 src_reg
= gen_reg_rtx (Pmode
);
2981 /* Mark the beginning of the strlen sequence so we can emit the
2982 source operand later. */
2983 before_strlen
= get_last_insn ();
2985 create_output_operand (&ops
[0], target
, insn_mode
);
2986 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2987 create_integer_operand (&ops
[2], 0);
2988 create_integer_operand (&ops
[3], align
);
2989 if (!maybe_expand_insn (icode
, 4, ops
))
2992 /* Now that we are assured of success, expand the source. */
2994 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
2997 #ifdef POINTERS_EXTEND_UNSIGNED
2998 if (GET_MODE (pat
) != Pmode
)
2999 pat
= convert_to_mode (Pmode
, pat
,
3000 POINTERS_EXTEND_UNSIGNED
);
3002 emit_move_insn (src_reg
, pat
);
3008 emit_insn_after (pat
, before_strlen
);
3010 emit_insn_before (pat
, get_insns ());
3012 /* Return the value in the proper mode for this function. */
3013 if (GET_MODE (ops
[0].value
) == target_mode
)
3014 target
= ops
[0].value
;
3015 else if (target
!= 0)
3016 convert_move (target
, ops
[0].value
, 0);
3018 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3024 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3025 bytes from constant string DATA + OFFSET and return it as target
3029 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3030 enum machine_mode mode
)
3032 const char *str
= (const char *) data
;
3034 gcc_assert (offset
>= 0
3035 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3036 <= strlen (str
) + 1));
3038 return c_readstr (str
+ offset
, mode
);
3041 /* Expand a call EXP to the memcpy builtin.
3042 Return NULL_RTX if we failed, the caller should emit a normal call,
3043 otherwise try to get the result in TARGET, if convenient (and in
3044 mode MODE if that's convenient). */
3047 expand_builtin_memcpy (tree exp
, rtx target
)
3049 if (!validate_arglist (exp
,
3050 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3054 tree dest
= CALL_EXPR_ARG (exp
, 0);
3055 tree src
= CALL_EXPR_ARG (exp
, 1);
3056 tree len
= CALL_EXPR_ARG (exp
, 2);
3057 const char *src_str
;
3058 unsigned int src_align
= get_pointer_alignment (src
);
3059 unsigned int dest_align
= get_pointer_alignment (dest
);
3060 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3061 HOST_WIDE_INT expected_size
= -1;
3062 unsigned int expected_align
= 0;
3064 /* If DEST is not a pointer type, call the normal function. */
3065 if (dest_align
== 0)
3068 /* If either SRC is not a pointer type, don't do this
3069 operation in-line. */
3073 if (currently_expanding_gimple_stmt
)
3074 stringop_block_profile (currently_expanding_gimple_stmt
,
3075 &expected_align
, &expected_size
);
3077 if (expected_align
< dest_align
)
3078 expected_align
= dest_align
;
3079 dest_mem
= get_memory_rtx (dest
, len
);
3080 set_mem_align (dest_mem
, dest_align
);
3081 len_rtx
= expand_normal (len
);
3082 src_str
= c_getstr (src
);
3084 /* If SRC is a string constant and block move would be done
3085 by pieces, we can avoid loading the string from memory
3086 and only stored the computed constants. */
3088 && CONST_INT_P (len_rtx
)
3089 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3090 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3091 CONST_CAST (char *, src_str
),
3094 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3095 builtin_memcpy_read_str
,
3096 CONST_CAST (char *, src_str
),
3097 dest_align
, false, 0);
3098 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3099 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3103 src_mem
= get_memory_rtx (src
, len
);
3104 set_mem_align (src_mem
, src_align
);
3106 /* Copy word part most expediently. */
3107 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3108 CALL_EXPR_TAILCALL (exp
)
3109 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3110 expected_align
, expected_size
);
3114 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3115 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3121 /* Expand a call EXP to the mempcpy builtin.
3122 Return NULL_RTX if we failed; the caller should emit a normal call,
3123 otherwise try to get the result in TARGET, if convenient (and in
3124 mode MODE if that's convenient). If ENDP is 0 return the
3125 destination pointer, if ENDP is 1 return the end pointer ala
3126 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3130 expand_builtin_mempcpy (tree exp
, rtx target
, enum machine_mode mode
)
3132 if (!validate_arglist (exp
,
3133 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3137 tree dest
= CALL_EXPR_ARG (exp
, 0);
3138 tree src
= CALL_EXPR_ARG (exp
, 1);
3139 tree len
= CALL_EXPR_ARG (exp
, 2);
3140 return expand_builtin_mempcpy_args (dest
, src
, len
,
3141 target
, mode
, /*endp=*/ 1);
3145 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3146 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3147 so that this can also be called without constructing an actual CALL_EXPR.
3148 The other arguments and return value are the same as for
3149 expand_builtin_mempcpy. */
3152 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3153 rtx target
, enum machine_mode mode
, int endp
)
3155 /* If return value is ignored, transform mempcpy into memcpy. */
3156 if (target
== const0_rtx
&& builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3158 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3159 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3161 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3165 const char *src_str
;
3166 unsigned int src_align
= get_pointer_alignment (src
);
3167 unsigned int dest_align
= get_pointer_alignment (dest
);
3168 rtx dest_mem
, src_mem
, len_rtx
;
3170 /* If either SRC or DEST is not a pointer type, don't do this
3171 operation in-line. */
3172 if (dest_align
== 0 || src_align
== 0)
3175 /* If LEN is not constant, call the normal function. */
3176 if (! host_integerp (len
, 1))
3179 len_rtx
= expand_normal (len
);
3180 src_str
= c_getstr (src
);
3182 /* If SRC is a string constant and block move would be done
3183 by pieces, we can avoid loading the string from memory
3184 and only stored the computed constants. */
3186 && CONST_INT_P (len_rtx
)
3187 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3188 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3189 CONST_CAST (char *, src_str
),
3192 dest_mem
= get_memory_rtx (dest
, len
);
3193 set_mem_align (dest_mem
, dest_align
);
3194 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3195 builtin_memcpy_read_str
,
3196 CONST_CAST (char *, src_str
),
3197 dest_align
, false, endp
);
3198 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3199 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3203 if (CONST_INT_P (len_rtx
)
3204 && can_move_by_pieces (INTVAL (len_rtx
),
3205 MIN (dest_align
, src_align
)))
3207 dest_mem
= get_memory_rtx (dest
, len
);
3208 set_mem_align (dest_mem
, dest_align
);
3209 src_mem
= get_memory_rtx (src
, len
);
3210 set_mem_align (src_mem
, src_align
);
3211 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3212 MIN (dest_align
, src_align
), endp
);
3213 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3214 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3223 # define HAVE_movstr 0
3224 # define CODE_FOR_movstr CODE_FOR_nothing
3227 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3228 we failed, the caller should emit a normal call, otherwise try to
3229 get the result in TARGET, if convenient. If ENDP is 0 return the
3230 destination pointer, if ENDP is 1 return the end pointer ala
3231 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3235 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3237 struct expand_operand ops
[3];
3244 dest_mem
= get_memory_rtx (dest
, NULL
);
3245 src_mem
= get_memory_rtx (src
, NULL
);
3248 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3249 dest_mem
= replace_equiv_address (dest_mem
, target
);
3252 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3253 create_fixed_operand (&ops
[1], dest_mem
);
3254 create_fixed_operand (&ops
[2], src_mem
);
3255 expand_insn (CODE_FOR_movstr
, 3, ops
);
3257 if (endp
&& target
!= const0_rtx
)
3259 target
= ops
[0].value
;
3260 /* movstr is supposed to set end to the address of the NUL
3261 terminator. If the caller requested a mempcpy-like return value,
3265 rtx tem
= plus_constant (GET_MODE (target
),
3266 gen_lowpart (GET_MODE (target
), target
), 1);
3267 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3273 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3274 NULL_RTX if we failed the caller should emit a normal call, otherwise
3275 try to get the result in TARGET, if convenient (and in mode MODE if that's
3279 expand_builtin_strcpy (tree exp
, rtx target
)
3281 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3283 tree dest
= CALL_EXPR_ARG (exp
, 0);
3284 tree src
= CALL_EXPR_ARG (exp
, 1);
3285 return expand_builtin_strcpy_args (dest
, src
, target
);
3290 /* Helper function to do the actual work for expand_builtin_strcpy. The
3291 arguments to the builtin_strcpy call DEST and SRC are broken out
3292 so that this can also be called without constructing an actual CALL_EXPR.
3293 The other arguments and return value are the same as for
3294 expand_builtin_strcpy. */
3297 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3299 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3302 /* Expand a call EXP to the stpcpy builtin.
3303 Return NULL_RTX if we failed the caller should emit a normal call,
3304 otherwise try to get the result in TARGET, if convenient (and in
3305 mode MODE if that's convenient). */
3308 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3311 location_t loc
= EXPR_LOCATION (exp
);
3313 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3316 dst
= CALL_EXPR_ARG (exp
, 0);
3317 src
= CALL_EXPR_ARG (exp
, 1);
3319 /* If return value is ignored, transform stpcpy into strcpy. */
3320 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3322 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3323 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3324 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3331 /* Ensure we get an actual string whose length can be evaluated at
3332 compile-time, not an expression containing a string. This is
3333 because the latter will potentially produce pessimized code
3334 when used to produce the return value. */
3335 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3336 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3338 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3339 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3340 target
, mode
, /*endp=*/2);
3345 if (TREE_CODE (len
) == INTEGER_CST
)
3347 rtx len_rtx
= expand_normal (len
);
3349 if (CONST_INT_P (len_rtx
))
3351 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3357 if (mode
!= VOIDmode
)
3358 target
= gen_reg_rtx (mode
);
3360 target
= gen_reg_rtx (GET_MODE (ret
));
3362 if (GET_MODE (target
) != GET_MODE (ret
))
3363 ret
= gen_lowpart (GET_MODE (target
), ret
);
3365 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3366 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3374 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3378 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3379 bytes from constant string DATA + OFFSET and return it as target
3383 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3384 enum machine_mode mode
)
3386 const char *str
= (const char *) data
;
3388 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3391 return c_readstr (str
+ offset
, mode
);
3394 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3395 NULL_RTX if we failed the caller should emit a normal call. */
3398 expand_builtin_strncpy (tree exp
, rtx target
)
3400 location_t loc
= EXPR_LOCATION (exp
);
3402 if (validate_arglist (exp
,
3403 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3405 tree dest
= CALL_EXPR_ARG (exp
, 0);
3406 tree src
= CALL_EXPR_ARG (exp
, 1);
3407 tree len
= CALL_EXPR_ARG (exp
, 2);
3408 tree slen
= c_strlen (src
, 1);
3410 /* We must be passed a constant len and src parameter. */
3411 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3414 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3416 /* We're required to pad with trailing zeros if the requested
3417 len is greater than strlen(s2)+1. In that case try to
3418 use store_by_pieces, if it fails, punt. */
3419 if (tree_int_cst_lt (slen
, len
))
3421 unsigned int dest_align
= get_pointer_alignment (dest
);
3422 const char *p
= c_getstr (src
);
3425 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3426 || !can_store_by_pieces (tree_low_cst (len
, 1),
3427 builtin_strncpy_read_str
,
3428 CONST_CAST (char *, p
),
3432 dest_mem
= get_memory_rtx (dest
, len
);
3433 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3434 builtin_strncpy_read_str
,
3435 CONST_CAST (char *, p
), dest_align
, false, 0);
3436 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3437 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3444 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3445 bytes from constant string DATA + OFFSET and return it as target
3449 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3450 enum machine_mode mode
)
3452 const char *c
= (const char *) data
;
3453 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3455 memset (p
, *c
, GET_MODE_SIZE (mode
));
3457 return c_readstr (p
, mode
);
3460 /* Callback routine for store_by_pieces. Return the RTL of a register
3461 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3462 char value given in the RTL register data. For example, if mode is
3463 4 bytes wide, return the RTL for 0x01010101*data. */
3466 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3467 enum machine_mode mode
)
3473 size
= GET_MODE_SIZE (mode
);
3477 p
= XALLOCAVEC (char, size
);
3478 memset (p
, 1, size
);
3479 coeff
= c_readstr (p
, mode
);
3481 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3482 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3483 return force_reg (mode
, target
);
3486 /* Expand expression EXP, which is a call to the memset builtin. Return
3487 NULL_RTX if we failed the caller should emit a normal call, otherwise
3488 try to get the result in TARGET, if convenient (and in mode MODE if that's
3492 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3494 if (!validate_arglist (exp
,
3495 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3499 tree dest
= CALL_EXPR_ARG (exp
, 0);
3500 tree val
= CALL_EXPR_ARG (exp
, 1);
3501 tree len
= CALL_EXPR_ARG (exp
, 2);
3502 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3506 /* Helper function to do the actual work for expand_builtin_memset. The
3507 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3508 so that this can also be called without constructing an actual CALL_EXPR.
3509 The other arguments and return value are the same as for
3510 expand_builtin_memset. */
3513 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3514 rtx target
, enum machine_mode mode
, tree orig_exp
)
3517 enum built_in_function fcode
;
3518 enum machine_mode val_mode
;
3520 unsigned int dest_align
;
3521 rtx dest_mem
, dest_addr
, len_rtx
;
3522 HOST_WIDE_INT expected_size
= -1;
3523 unsigned int expected_align
= 0;
3525 dest_align
= get_pointer_alignment (dest
);
3527 /* If DEST is not a pointer type, don't do this operation in-line. */
3528 if (dest_align
== 0)
3531 if (currently_expanding_gimple_stmt
)
3532 stringop_block_profile (currently_expanding_gimple_stmt
,
3533 &expected_align
, &expected_size
);
3535 if (expected_align
< dest_align
)
3536 expected_align
= dest_align
;
3538 /* If the LEN parameter is zero, return DEST. */
3539 if (integer_zerop (len
))
3541 /* Evaluate and ignore VAL in case it has side-effects. */
3542 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3543 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3546 /* Stabilize the arguments in case we fail. */
3547 dest
= builtin_save_expr (dest
);
3548 val
= builtin_save_expr (val
);
3549 len
= builtin_save_expr (len
);
3551 len_rtx
= expand_normal (len
);
3552 dest_mem
= get_memory_rtx (dest
, len
);
3553 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3555 if (TREE_CODE (val
) != INTEGER_CST
)
3559 val_rtx
= expand_normal (val
);
3560 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3562 /* Assume that we can memset by pieces if we can store
3563 * the coefficients by pieces (in the required modes).
3564 * We can't pass builtin_memset_gen_str as that emits RTL. */
3566 if (host_integerp (len
, 1)
3567 && can_store_by_pieces (tree_low_cst (len
, 1),
3568 builtin_memset_read_str
, &c
, dest_align
,
3571 val_rtx
= force_reg (val_mode
, val_rtx
);
3572 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3573 builtin_memset_gen_str
, val_rtx
, dest_align
,
3576 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3577 dest_align
, expected_align
,
3581 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3582 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3586 if (target_char_cast (val
, &c
))
3591 if (host_integerp (len
, 1)
3592 && can_store_by_pieces (tree_low_cst (len
, 1),
3593 builtin_memset_read_str
, &c
, dest_align
,
3595 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3596 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3597 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3598 gen_int_mode (c
, val_mode
),
3599 dest_align
, expected_align
,
3603 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3604 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3608 set_mem_align (dest_mem
, dest_align
);
3609 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3610 CALL_EXPR_TAILCALL (orig_exp
)
3611 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3612 expected_align
, expected_size
);
3616 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3617 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3623 fndecl
= get_callee_fndecl (orig_exp
);
3624 fcode
= DECL_FUNCTION_CODE (fndecl
);
3625 if (fcode
== BUILT_IN_MEMSET
)
3626 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3628 else if (fcode
== BUILT_IN_BZERO
)
3629 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3633 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3634 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3635 return expand_call (fn
, target
, target
== const0_rtx
);
3638 /* Expand expression EXP, which is a call to the bzero builtin. Return
3639 NULL_RTX if we failed the caller should emit a normal call. */
3642 expand_builtin_bzero (tree exp
)
3645 location_t loc
= EXPR_LOCATION (exp
);
3647 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3650 dest
= CALL_EXPR_ARG (exp
, 0);
3651 size
= CALL_EXPR_ARG (exp
, 1);
3653 /* New argument list transforming bzero(ptr x, int y) to
3654 memset(ptr x, int 0, size_t y). This is done this way
3655 so that if it isn't expanded inline, we fallback to
3656 calling bzero instead of memset. */
3658 return expand_builtin_memset_args (dest
, integer_zero_node
,
3659 fold_convert_loc (loc
,
3660 size_type_node
, size
),
3661 const0_rtx
, VOIDmode
, exp
);
3664 /* Expand expression EXP, which is a call to the memcmp built-in function.
3665 Return NULL_RTX if we failed and the caller should emit a normal call,
3666 otherwise try to get the result in TARGET, if convenient (and in mode
3667 MODE, if that's convenient). */
3670 expand_builtin_memcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3671 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3673 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3675 if (!validate_arglist (exp
,
3676 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3679 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3680 implementing memcmp because it will stop if it encounters two
3682 #if defined HAVE_cmpmemsi
3684 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3687 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3688 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3689 tree len
= CALL_EXPR_ARG (exp
, 2);
3691 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3692 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3693 enum machine_mode insn_mode
;
3696 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3700 /* If we don't have POINTER_TYPE, call the function. */
3701 if (arg1_align
== 0 || arg2_align
== 0)
3704 /* Make a place to write the result of the instruction. */
3707 && REG_P (result
) && GET_MODE (result
) == insn_mode
3708 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3709 result
= gen_reg_rtx (insn_mode
);
3711 arg1_rtx
= get_memory_rtx (arg1
, len
);
3712 arg2_rtx
= get_memory_rtx (arg2
, len
);
3713 arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3715 /* Set MEM_SIZE as appropriate. */
3716 if (CONST_INT_P (arg3_rtx
))
3718 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3719 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3723 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3724 GEN_INT (MIN (arg1_align
, arg2_align
)));
3731 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
3732 TYPE_MODE (integer_type_node
), 3,
3733 XEXP (arg1_rtx
, 0), Pmode
,
3734 XEXP (arg2_rtx
, 0), Pmode
,
3735 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3736 TYPE_UNSIGNED (sizetype
)),
3737 TYPE_MODE (sizetype
));
3739 /* Return the value in the proper mode for this function. */
3740 mode
= TYPE_MODE (TREE_TYPE (exp
));
3741 if (GET_MODE (result
) == mode
)
3743 else if (target
!= 0)
3745 convert_move (target
, result
, 0);
3749 return convert_to_mode (mode
, result
, 0);
3751 #endif /* HAVE_cmpmemsi. */
3756 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3757 if we failed the caller should emit a normal call, otherwise try to get
3758 the result in TARGET, if convenient. */
3761 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3763 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3766 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3767 if (direct_optab_handler (cmpstr_optab
, SImode
) != CODE_FOR_nothing
3768 || direct_optab_handler (cmpstrn_optab
, SImode
) != CODE_FOR_nothing
)
3770 rtx arg1_rtx
, arg2_rtx
;
3771 rtx result
, insn
= NULL_RTX
;
3773 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3774 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3776 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3777 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3779 /* If we don't have POINTER_TYPE, call the function. */
3780 if (arg1_align
== 0 || arg2_align
== 0)
3783 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3784 arg1
= builtin_save_expr (arg1
);
3785 arg2
= builtin_save_expr (arg2
);
3787 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3788 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3790 #ifdef HAVE_cmpstrsi
3791 /* Try to call cmpstrsi. */
3794 enum machine_mode insn_mode
3795 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3797 /* Make a place to write the result of the instruction. */
3800 && REG_P (result
) && GET_MODE (result
) == insn_mode
3801 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3802 result
= gen_reg_rtx (insn_mode
);
3804 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
3805 GEN_INT (MIN (arg1_align
, arg2_align
)));
3808 #ifdef HAVE_cmpstrnsi
3809 /* Try to determine at least one length and call cmpstrnsi. */
3810 if (!insn
&& HAVE_cmpstrnsi
)
3815 enum machine_mode insn_mode
3816 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3817 tree len1
= c_strlen (arg1
, 1);
3818 tree len2
= c_strlen (arg2
, 1);
3821 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3823 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3825 /* If we don't have a constant length for the first, use the length
3826 of the second, if we know it. We don't require a constant for
3827 this case; some cost analysis could be done if both are available
3828 but neither is constant. For now, assume they're equally cheap,
3829 unless one has side effects. If both strings have constant lengths,
3836 else if (TREE_SIDE_EFFECTS (len1
))
3838 else if (TREE_SIDE_EFFECTS (len2
))
3840 else if (TREE_CODE (len1
) != INTEGER_CST
)
3842 else if (TREE_CODE (len2
) != INTEGER_CST
)
3844 else if (tree_int_cst_lt (len1
, len2
))
3849 /* If both arguments have side effects, we cannot optimize. */
3850 if (!len
|| TREE_SIDE_EFFECTS (len
))
3853 arg3_rtx
= expand_normal (len
);
3855 /* Make a place to write the result of the instruction. */
3858 && REG_P (result
) && GET_MODE (result
) == insn_mode
3859 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3860 result
= gen_reg_rtx (insn_mode
);
3862 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3863 GEN_INT (MIN (arg1_align
, arg2_align
)));
3869 enum machine_mode mode
;
3872 /* Return the value in the proper mode for this function. */
3873 mode
= TYPE_MODE (TREE_TYPE (exp
));
3874 if (GET_MODE (result
) == mode
)
3877 return convert_to_mode (mode
, result
, 0);
3878 convert_move (target
, result
, 0);
3882 /* Expand the library call ourselves using a stabilized argument
3883 list to avoid re-evaluating the function's arguments twice. */
3884 #ifdef HAVE_cmpstrnsi
3887 fndecl
= get_callee_fndecl (exp
);
3888 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3889 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3890 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3891 return expand_call (fn
, target
, target
== const0_rtx
);
3897 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3898 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3899 the result in TARGET, if convenient. */
3902 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3903 ATTRIBUTE_UNUSED
enum machine_mode mode
)
3905 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3907 if (!validate_arglist (exp
,
3908 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3911 /* If c_strlen can determine an expression for one of the string
3912 lengths, and it doesn't have side effects, then emit cmpstrnsi
3913 using length MIN(strlen(string)+1, arg3). */
3914 #ifdef HAVE_cmpstrnsi
3917 tree len
, len1
, len2
;
3918 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3921 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3922 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3923 tree arg3
= CALL_EXPR_ARG (exp
, 2);
3925 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3926 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3927 enum machine_mode insn_mode
3928 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3930 len1
= c_strlen (arg1
, 1);
3931 len2
= c_strlen (arg2
, 1);
3934 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
3936 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
3938 /* If we don't have a constant length for the first, use the length
3939 of the second, if we know it. We don't require a constant for
3940 this case; some cost analysis could be done if both are available
3941 but neither is constant. For now, assume they're equally cheap,
3942 unless one has side effects. If both strings have constant lengths,
3949 else if (TREE_SIDE_EFFECTS (len1
))
3951 else if (TREE_SIDE_EFFECTS (len2
))
3953 else if (TREE_CODE (len1
) != INTEGER_CST
)
3955 else if (TREE_CODE (len2
) != INTEGER_CST
)
3957 else if (tree_int_cst_lt (len1
, len2
))
3962 /* If both arguments have side effects, we cannot optimize. */
3963 if (!len
|| TREE_SIDE_EFFECTS (len
))
3966 /* The actual new length parameter is MIN(len,arg3). */
3967 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
3968 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
3970 /* If we don't have POINTER_TYPE, call the function. */
3971 if (arg1_align
== 0 || arg2_align
== 0)
3974 /* Make a place to write the result of the instruction. */
3977 && REG_P (result
) && GET_MODE (result
) == insn_mode
3978 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3979 result
= gen_reg_rtx (insn_mode
);
3981 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3982 arg1
= builtin_save_expr (arg1
);
3983 arg2
= builtin_save_expr (arg2
);
3984 len
= builtin_save_expr (len
);
3986 arg1_rtx
= get_memory_rtx (arg1
, len
);
3987 arg2_rtx
= get_memory_rtx (arg2
, len
);
3988 arg3_rtx
= expand_normal (len
);
3989 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3990 GEN_INT (MIN (arg1_align
, arg2_align
)));
3995 /* Return the value in the proper mode for this function. */
3996 mode
= TYPE_MODE (TREE_TYPE (exp
));
3997 if (GET_MODE (result
) == mode
)
4000 return convert_to_mode (mode
, result
, 0);
4001 convert_move (target
, result
, 0);
4005 /* Expand the library call ourselves using a stabilized argument
4006 list to avoid re-evaluating the function's arguments twice. */
4007 fndecl
= get_callee_fndecl (exp
);
4008 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4010 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4011 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4012 return expand_call (fn
, target
, target
== const0_rtx
);
4018 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4019 if that's convenient. */
4022 expand_builtin_saveregs (void)
4026 /* Don't do __builtin_saveregs more than once in a function.
4027 Save the result of the first call and reuse it. */
4028 if (saveregs_value
!= 0)
4029 return saveregs_value
;
4031 /* When this function is called, it means that registers must be
4032 saved on entry to this function. So we migrate the call to the
4033 first insn of this function. */
4037 /* Do whatever the machine needs done in this case. */
4038 val
= targetm
.calls
.expand_builtin_saveregs ();
4043 saveregs_value
= val
;
4045 /* Put the insns after the NOTE that starts the function. If this
4046 is inside a start_sequence, make the outer-level insn chain current, so
4047 the code is placed at the start of the function. */
4048 push_topmost_sequence ();
4049 emit_insn_after (seq
, entry_of_function ());
4050 pop_topmost_sequence ();
4055 /* Expand a call to __builtin_next_arg. */
4058 expand_builtin_next_arg (void)
4060 /* Checking arguments is already done in fold_builtin_next_arg
4061 that must be called before this function. */
4062 return expand_binop (ptr_mode
, add_optab
,
4063 crtl
->args
.internal_arg_pointer
,
4064 crtl
->args
.arg_offset_rtx
,
4065 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4068 /* Make it easier for the backends by protecting the valist argument
4069 from multiple evaluations. */
4072 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4074 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4076 /* The current way of determining the type of valist is completely
4077 bogus. We should have the information on the va builtin instead. */
4079 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4081 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4083 if (TREE_SIDE_EFFECTS (valist
))
4084 valist
= save_expr (valist
);
4086 /* For this case, the backends will be expecting a pointer to
4087 vatype, but it's possible we've actually been given an array
4088 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4090 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4092 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4093 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4098 tree pt
= build_pointer_type (vatype
);
4102 if (! TREE_SIDE_EFFECTS (valist
))
4105 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4106 TREE_SIDE_EFFECTS (valist
) = 1;
4109 if (TREE_SIDE_EFFECTS (valist
))
4110 valist
= save_expr (valist
);
4111 valist
= fold_build2_loc (loc
, MEM_REF
,
4112 vatype
, valist
, build_int_cst (pt
, 0));
4118 /* The "standard" definition of va_list is void*. */
4121 std_build_builtin_va_list (void)
4123 return ptr_type_node
;
4126 /* The "standard" abi va_list is va_list_type_node. */
4129 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4131 return va_list_type_node
;
4134 /* The "standard" type of va_list is va_list_type_node. */
4137 std_canonical_va_list_type (tree type
)
4141 if (INDIRECT_REF_P (type
))
4142 type
= TREE_TYPE (type
);
4143 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4144 type
= TREE_TYPE (type
);
4145 wtype
= va_list_type_node
;
4147 /* Treat structure va_list types. */
4148 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4149 htype
= TREE_TYPE (htype
);
4150 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4152 /* If va_list is an array type, the argument may have decayed
4153 to a pointer type, e.g. by being passed to another function.
4154 In that case, unwrap both types so that we can compare the
4155 underlying records. */
4156 if (TREE_CODE (htype
) == ARRAY_TYPE
4157 || POINTER_TYPE_P (htype
))
4159 wtype
= TREE_TYPE (wtype
);
4160 htype
= TREE_TYPE (htype
);
4163 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4164 return va_list_type_node
;
4169 /* The "standard" implementation of va_start: just assign `nextarg' to
4173 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4175 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4176 convert_move (va_r
, nextarg
, 0);
4179 /* Expand EXP, a call to __builtin_va_start. */
4182 expand_builtin_va_start (tree exp
)
4186 location_t loc
= EXPR_LOCATION (exp
);
4188 if (call_expr_nargs (exp
) < 2)
4190 error_at (loc
, "too few arguments to function %<va_start%>");
4194 if (fold_builtin_next_arg (exp
, true))
4197 nextarg
= expand_builtin_next_arg ();
4198 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4200 if (targetm
.expand_builtin_va_start
)
4201 targetm
.expand_builtin_va_start (valist
, nextarg
);
4203 std_expand_builtin_va_start (valist
, nextarg
);
4208 /* Expand EXP, a call to __builtin_va_end. */
4211 expand_builtin_va_end (tree exp
)
4213 tree valist
= CALL_EXPR_ARG (exp
, 0);
4215 /* Evaluate for side effects, if needed. I hate macros that don't
4217 if (TREE_SIDE_EFFECTS (valist
))
4218 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4223 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4224 builtin rather than just as an assignment in stdarg.h because of the
4225 nastiness of array-type va_list types. */
4228 expand_builtin_va_copy (tree exp
)
4231 location_t loc
= EXPR_LOCATION (exp
);
4233 dst
= CALL_EXPR_ARG (exp
, 0);
4234 src
= CALL_EXPR_ARG (exp
, 1);
4236 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4237 src
= stabilize_va_list_loc (loc
, src
, 0);
4239 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4241 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4243 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4244 TREE_SIDE_EFFECTS (t
) = 1;
4245 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4249 rtx dstb
, srcb
, size
;
4251 /* Evaluate to pointers. */
4252 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4253 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4254 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4255 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4257 dstb
= convert_memory_address (Pmode
, dstb
);
4258 srcb
= convert_memory_address (Pmode
, srcb
);
4260 /* "Dereference" to BLKmode memories. */
4261 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4262 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4263 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4264 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4265 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4266 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4269 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4275 /* Expand a call to one of the builtin functions __builtin_frame_address or
4276 __builtin_return_address. */
4279 expand_builtin_frame_address (tree fndecl
, tree exp
)
4281 /* The argument must be a nonnegative integer constant.
4282 It counts the number of frames to scan up the stack.
4283 The value is the return address saved in that frame. */
4284 if (call_expr_nargs (exp
) == 0)
4285 /* Warning about missing arg was already issued. */
4287 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4289 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4290 error ("invalid argument to %<__builtin_frame_address%>");
4292 error ("invalid argument to %<__builtin_return_address%>");
4298 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4299 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4301 /* Some ports cannot access arbitrary stack frames. */
4304 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4305 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4307 warning (0, "unsupported argument to %<__builtin_return_address%>");
4311 /* For __builtin_frame_address, return what we've got. */
4312 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4316 && ! CONSTANT_P (tem
))
4317 tem
= copy_addr_to_reg (tem
);
4322 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4323 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4324 is the same as for allocate_dynamic_stack_space. */
4327 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4333 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4334 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4337 = (alloca_with_align
4338 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4339 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4344 /* Compute the argument. */
4345 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4347 /* Compute the alignment. */
4348 align
= (alloca_with_align
4349 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4350 : BIGGEST_ALIGNMENT
);
4352 /* Allocate the desired space. */
4353 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4354 result
= convert_memory_address (ptr_mode
, result
);
4359 /* Expand a call to bswap builtin in EXP.
4360 Return NULL_RTX if a normal call should be emitted rather than expanding the
4361 function in-line. If convenient, the result should be placed in TARGET.
4362 SUBTARGET may be used as the target for computing one of EXP's operands. */
4365 expand_builtin_bswap (enum machine_mode target_mode
, tree exp
, rtx target
,
4371 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4374 arg
= CALL_EXPR_ARG (exp
, 0);
4375 op0
= expand_expr (arg
,
4376 subtarget
&& GET_MODE (subtarget
) == target_mode
4377 ? subtarget
: NULL_RTX
,
4378 target_mode
, EXPAND_NORMAL
);
4379 if (GET_MODE (op0
) != target_mode
)
4380 op0
= convert_to_mode (target_mode
, op0
, 1);
4382 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4384 gcc_assert (target
);
4386 return convert_to_mode (target_mode
, target
, 1);
4389 /* Expand a call to a unary builtin in EXP.
4390 Return NULL_RTX if a normal call should be emitted rather than expanding the
4391 function in-line. If convenient, the result should be placed in TARGET.
4392 SUBTARGET may be used as the target for computing one of EXP's operands. */
4395 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4396 rtx subtarget
, optab op_optab
)
4400 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4403 /* Compute the argument. */
4404 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4406 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4407 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4408 VOIDmode
, EXPAND_NORMAL
);
4409 /* Compute op, into TARGET if possible.
4410 Set TARGET to wherever the result comes back. */
4411 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4412 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4413 gcc_assert (target
);
4415 return convert_to_mode (target_mode
, target
, 0);
4418 /* Expand a call to __builtin_expect. We just return our argument
4419 as the builtin_expect semantic should've been already executed by
4420 tree branch prediction pass. */
4423 expand_builtin_expect (tree exp
, rtx target
)
4427 if (call_expr_nargs (exp
) < 2)
4429 arg
= CALL_EXPR_ARG (exp
, 0);
4431 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4432 /* When guessing was done, the hints should be already stripped away. */
4433 gcc_assert (!flag_guess_branch_prob
4434 || optimize
== 0 || seen_error ());
4438 /* Expand a call to __builtin_assume_aligned. We just return our first
4439 argument as the builtin_assume_aligned semantic should've been already
4443 expand_builtin_assume_aligned (tree exp
, rtx target
)
4445 if (call_expr_nargs (exp
) < 2)
4447 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4449 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4450 && (call_expr_nargs (exp
) < 3
4451 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4456 expand_builtin_trap (void)
4461 rtx insn
= emit_insn (gen_trap ());
4462 /* For trap insns when not accumulating outgoing args force
4463 REG_ARGS_SIZE note to prevent crossjumping of calls with
4464 different args sizes. */
4465 if (!ACCUMULATE_OUTGOING_ARGS
)
4466 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4470 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4485 /* Expand EXP, a call to fabs, fabsf or fabsl.
4486 Return NULL_RTX if a normal call should be emitted rather than expanding
4487 the function inline. If convenient, the result should be placed
4488 in TARGET. SUBTARGET may be used as the target for computing
4492 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4494 enum machine_mode mode
;
4498 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4501 arg
= CALL_EXPR_ARG (exp
, 0);
4502 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4503 mode
= TYPE_MODE (TREE_TYPE (arg
));
4504 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4505 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4508 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4509 Return NULL is a normal call should be emitted rather than expanding the
4510 function inline. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing the operand. */
4514 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4519 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4522 arg
= CALL_EXPR_ARG (exp
, 0);
4523 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4525 arg
= CALL_EXPR_ARG (exp
, 1);
4526 op1
= expand_normal (arg
);
4528 return expand_copysign (op0
, op1
, target
);
4531 /* Create a new constant string literal and return a char* pointer to it.
4532 The STRING_CST value is the LEN characters at STR. */
4534 build_string_literal (int len
, const char *str
)
4536 tree t
, elem
, index
, type
;
4538 t
= build_string (len
, str
);
4539 elem
= build_type_variant (char_type_node
, 1, 0);
4540 index
= build_index_type (size_int (len
- 1));
4541 type
= build_array_type (elem
, index
);
4542 TREE_TYPE (t
) = type
;
4543 TREE_CONSTANT (t
) = 1;
4544 TREE_READONLY (t
) = 1;
4545 TREE_STATIC (t
) = 1;
4547 type
= build_pointer_type (elem
);
4548 t
= build1 (ADDR_EXPR
, type
,
4549 build4 (ARRAY_REF
, elem
,
4550 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
4554 /* Expand a call to __builtin___clear_cache. */
4557 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
4559 #ifndef HAVE_clear_cache
4560 #ifdef CLEAR_INSN_CACHE
4561 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4562 does something. Just do the default expansion to a call to
4566 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4567 does nothing. There is no need to call it. Do nothing. */
4569 #endif /* CLEAR_INSN_CACHE */
4571 /* We have a "clear_cache" insn, and it will handle everything. */
4573 rtx begin_rtx
, end_rtx
;
4575 /* We must not expand to a library call. If we did, any
4576 fallback library function in libgcc that might contain a call to
4577 __builtin___clear_cache() would recurse infinitely. */
4578 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4580 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4584 if (HAVE_clear_cache
)
4586 struct expand_operand ops
[2];
4588 begin
= CALL_EXPR_ARG (exp
, 0);
4589 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4591 end
= CALL_EXPR_ARG (exp
, 1);
4592 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4594 create_address_operand (&ops
[0], begin_rtx
);
4595 create_address_operand (&ops
[1], end_rtx
);
4596 if (maybe_expand_insn (CODE_FOR_clear_cache
, 2, ops
))
4600 #endif /* HAVE_clear_cache */
4603 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4606 round_trampoline_addr (rtx tramp
)
4608 rtx temp
, addend
, mask
;
4610 /* If we don't need too much alignment, we'll have been guaranteed
4611 proper alignment by get_trampoline_type. */
4612 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4615 /* Round address up to desired boundary. */
4616 temp
= gen_reg_rtx (Pmode
);
4617 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4618 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4620 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4621 temp
, 0, OPTAB_LIB_WIDEN
);
4622 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4623 temp
, 0, OPTAB_LIB_WIDEN
);
4629 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4631 tree t_tramp
, t_func
, t_chain
;
4632 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4634 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4635 POINTER_TYPE
, VOID_TYPE
))
4638 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4639 t_func
= CALL_EXPR_ARG (exp
, 1);
4640 t_chain
= CALL_EXPR_ARG (exp
, 2);
4642 r_tramp
= expand_normal (t_tramp
);
4643 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4644 MEM_NOTRAP_P (m_tramp
) = 1;
4646 /* If ONSTACK, the TRAMP argument should be the address of a field
4647 within the local function's FRAME decl. Either way, let's see if
4648 we can fill in the MEM_ATTRs for this memory. */
4649 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4650 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4652 /* Creator of a heap trampoline is responsible for making sure the
4653 address is aligned to at least STACK_BOUNDARY. Normally malloc
4654 will ensure this anyhow. */
4655 tmp
= round_trampoline_addr (r_tramp
);
4658 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4659 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4660 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4663 /* The FUNC argument should be the address of the nested function.
4664 Extract the actual function decl to pass to the hook. */
4665 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4666 t_func
= TREE_OPERAND (t_func
, 0);
4667 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4669 r_chain
= expand_normal (t_chain
);
4671 /* Generate insns to initialize the trampoline. */
4672 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4676 trampolines_created
= 1;
4678 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4679 "trampoline generated for nested function %qD", t_func
);
4686 expand_builtin_adjust_trampoline (tree exp
)
4690 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4693 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4694 tramp
= round_trampoline_addr (tramp
);
4695 if (targetm
.calls
.trampoline_adjust_address
)
4696 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4701 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4702 function. The function first checks whether the back end provides
4703 an insn to implement signbit for the respective mode. If not, it
4704 checks whether the floating point format of the value is such that
4705 the sign bit can be extracted. If that is not the case, the
4706 function returns NULL_RTX to indicate that a normal call should be
4707 emitted rather than expanding the function in-line. EXP is the
4708 expression that is a call to the builtin function; if convenient,
4709 the result should be placed in TARGET. */
4711 expand_builtin_signbit (tree exp
, rtx target
)
4713 const struct real_format
*fmt
;
4714 enum machine_mode fmode
, imode
, rmode
;
4717 enum insn_code icode
;
4719 location_t loc
= EXPR_LOCATION (exp
);
4721 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4724 arg
= CALL_EXPR_ARG (exp
, 0);
4725 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4726 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4727 fmt
= REAL_MODE_FORMAT (fmode
);
4729 arg
= builtin_save_expr (arg
);
4731 /* Expand the argument yielding a RTX expression. */
4732 temp
= expand_normal (arg
);
4734 /* Check if the back end provides an insn that handles signbit for the
4736 icode
= optab_handler (signbit_optab
, fmode
);
4737 if (icode
!= CODE_FOR_nothing
)
4739 rtx last
= get_last_insn ();
4740 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4741 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4743 delete_insns_since (last
);
4746 /* For floating point formats without a sign bit, implement signbit
4748 bitpos
= fmt
->signbit_ro
;
4751 /* But we can't do this if the format supports signed zero. */
4752 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
4755 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4756 build_real (TREE_TYPE (arg
), dconst0
));
4757 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4760 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4762 imode
= int_mode_for_mode (fmode
);
4763 if (imode
== BLKmode
)
4765 temp
= gen_lowpart (imode
, temp
);
4770 /* Handle targets with different FP word orders. */
4771 if (FLOAT_WORDS_BIG_ENDIAN
)
4772 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4774 word
= bitpos
/ BITS_PER_WORD
;
4775 temp
= operand_subword_force (temp
, word
, fmode
);
4776 bitpos
= bitpos
% BITS_PER_WORD
;
4779 /* Force the intermediate word_mode (or narrower) result into a
4780 register. This avoids attempting to create paradoxical SUBREGs
4781 of floating point modes below. */
4782 temp
= force_reg (imode
, temp
);
4784 /* If the bitpos is within the "result mode" lowpart, the operation
4785 can be implement with a single bitwise AND. Otherwise, we need
4786 a right shift and an AND. */
4788 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4790 double_int mask
= double_int_zero
.set_bit (bitpos
);
4792 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
4793 temp
= gen_lowpart (rmode
, temp
);
4794 temp
= expand_binop (rmode
, and_optab
, temp
,
4795 immed_double_int_const (mask
, rmode
),
4796 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4800 /* Perform a logical right shift to place the signbit in the least
4801 significant bit, then truncate the result to the desired mode
4802 and mask just this bit. */
4803 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
4804 temp
= gen_lowpart (rmode
, temp
);
4805 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
4806 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4812 /* Expand fork or exec calls. TARGET is the desired target of the
4813 call. EXP is the call. FN is the
4814 identificator of the actual function. IGNORE is nonzero if the
4815 value is to be ignored. */
4818 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
4823 /* If we are not profiling, just call the function. */
4824 if (!profile_arc_flag
)
4827 /* Otherwise call the wrapper. This should be equivalent for the rest of
4828 compiler, so the code does not diverge, and the wrapper may run the
4829 code necessary for keeping the profiling sane. */
4831 switch (DECL_FUNCTION_CODE (fn
))
4834 id
= get_identifier ("__gcov_fork");
4837 case BUILT_IN_EXECL
:
4838 id
= get_identifier ("__gcov_execl");
4841 case BUILT_IN_EXECV
:
4842 id
= get_identifier ("__gcov_execv");
4845 case BUILT_IN_EXECLP
:
4846 id
= get_identifier ("__gcov_execlp");
4849 case BUILT_IN_EXECLE
:
4850 id
= get_identifier ("__gcov_execle");
4853 case BUILT_IN_EXECVP
:
4854 id
= get_identifier ("__gcov_execvp");
4857 case BUILT_IN_EXECVE
:
4858 id
= get_identifier ("__gcov_execve");
4865 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
4866 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
4867 DECL_EXTERNAL (decl
) = 1;
4868 TREE_PUBLIC (decl
) = 1;
4869 DECL_ARTIFICIAL (decl
) = 1;
4870 TREE_NOTHROW (decl
) = 1;
4871 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
4872 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
4873 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
4874 return expand_call (call
, target
, ignore
);
4879 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4880 the pointer in these functions is void*, the tree optimizers may remove
4881 casts. The mode computed in expand_builtin isn't reliable either, due
4882 to __sync_bool_compare_and_swap.
4884 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4885 group of builtins. This gives us log2 of the mode size. */
4887 static inline enum machine_mode
4888 get_builtin_sync_mode (int fcode_diff
)
4890 /* The size is not negotiable, so ask not to get BLKmode in return
4891 if the target indicates that a smaller size would be better. */
4892 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
4895 /* Expand the memory expression LOC and return the appropriate memory operand
4896 for the builtin_sync operations. */
4899 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
4903 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
4904 addr
= convert_memory_address (Pmode
, addr
);
4906 /* Note that we explicitly do not want any alias information for this
4907 memory, so that we kill all other live memories. Otherwise we don't
4908 satisfy the full barrier semantics of the intrinsic. */
4909 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
4911 /* The alignment needs to be at least according to that of the mode. */
4912 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
4913 get_pointer_alignment (loc
)));
4914 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
4915 MEM_VOLATILE_P (mem
) = 1;
4920 /* Make sure an argument is in the right mode.
4921 EXP is the tree argument.
4922 MODE is the mode it should be in. */
4925 expand_expr_force_mode (tree exp
, enum machine_mode mode
)
4928 enum machine_mode old_mode
;
4930 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
4931 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4932 of CONST_INTs, where we know the old_mode only from the call argument. */
4934 old_mode
= GET_MODE (val
);
4935 if (old_mode
== VOIDmode
)
4936 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
4937 val
= convert_modes (mode
, old_mode
, val
, 1);
4942 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4943 EXP is the CALL_EXPR. CODE is the rtx code
4944 that corresponds to the arithmetic or logical operation from the name;
4945 an exception here is that NOT actually means NAND. TARGET is an optional
4946 place for us to store the results; AFTER is true if this is the
4947 fetch_and_xxx form. */
4950 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
4951 enum rtx_code code
, bool after
,
4955 location_t loc
= EXPR_LOCATION (exp
);
4957 if (code
== NOT
&& warn_sync_nand
)
4959 tree fndecl
= get_callee_fndecl (exp
);
4960 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
4962 static bool warned_f_a_n
, warned_n_a_f
;
4966 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
4967 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
4968 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
4969 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
4970 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
4974 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
4975 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
4976 warned_f_a_n
= true;
4979 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
4980 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
4981 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
4982 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
4983 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
4987 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
4988 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
4989 warned_n_a_f
= true;
4997 /* Expand the operands. */
4998 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
4999 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5001 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SEQ_CST
,
5005 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5006 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5007 true if this is the boolean form. TARGET is a place for us to store the
5008 results; this is NOT optional if IS_BOOL is true. */
5011 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5012 bool is_bool
, rtx target
)
5014 rtx old_val
, new_val
, mem
;
5017 /* Expand the operands. */
5018 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5019 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5020 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5022 pbool
= poval
= NULL
;
5023 if (target
!= const0_rtx
)
5030 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5031 false, MEMMODEL_SEQ_CST
,
5038 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5039 general form is actually an atomic exchange, and some targets only
5040 support a reduced form with the second argument being a constant 1.
5041 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5045 expand_builtin_sync_lock_test_and_set (enum machine_mode mode
, tree exp
,
5050 /* Expand the operands. */
5051 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5052 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5054 return expand_sync_lock_test_and_set (target
, mem
, val
);
5057 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5060 expand_builtin_sync_lock_release (enum machine_mode mode
, tree exp
)
5064 /* Expand the operands. */
5065 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5067 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_RELEASE
, true);
5070 /* Given an integer representing an ``enum memmodel'', verify its
5071 correctness and return the memory model enum. */
5073 static enum memmodel
5074 get_memmodel (tree exp
)
5077 unsigned HOST_WIDE_INT val
;
5079 /* If the parameter is not a constant, it's a run time value so we'll just
5080 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5081 if (TREE_CODE (exp
) != INTEGER_CST
)
5082 return MEMMODEL_SEQ_CST
;
5084 op
= expand_normal (exp
);
5087 if (targetm
.memmodel_check
)
5088 val
= targetm
.memmodel_check (val
);
5089 else if (val
& ~MEMMODEL_MASK
)
5091 warning (OPT_Winvalid_memory_model
,
5092 "Unknown architecture specifier in memory model to builtin.");
5093 return MEMMODEL_SEQ_CST
;
5096 if ((INTVAL (op
) & MEMMODEL_MASK
) >= MEMMODEL_LAST
)
5098 warning (OPT_Winvalid_memory_model
,
5099 "invalid memory model argument to builtin");
5100 return MEMMODEL_SEQ_CST
;
5103 return (enum memmodel
) val
;
5106 /* Expand the __atomic_exchange intrinsic:
5107 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5108 EXP is the CALL_EXPR.
5109 TARGET is an optional place for us to store the results. */
5112 expand_builtin_atomic_exchange (enum machine_mode mode
, tree exp
, rtx target
)
5115 enum memmodel model
;
5117 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5118 if ((model
& MEMMODEL_MASK
) == MEMMODEL_CONSUME
)
5120 error ("invalid memory model for %<__atomic_exchange%>");
5124 if (!flag_inline_atomics
)
5127 /* Expand the operands. */
5128 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5129 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5131 return expand_atomic_exchange (target
, mem
, val
, model
);
5134 /* Expand the __atomic_compare_exchange intrinsic:
5135 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5136 TYPE desired, BOOL weak,
5137 enum memmodel success,
5138 enum memmodel failure)
5139 EXP is the CALL_EXPR.
5140 TARGET is an optional place for us to store the results. */
5143 expand_builtin_atomic_compare_exchange (enum machine_mode mode
, tree exp
,
5146 rtx expect
, desired
, mem
, oldval
;
5147 enum memmodel success
, failure
;
5151 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5152 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5154 if ((failure
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5155 || (failure
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5157 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5161 if (failure
> success
)
5163 error ("failure memory model cannot be stronger than success "
5164 "memory model for %<__atomic_compare_exchange%>");
5168 if (!flag_inline_atomics
)
5171 /* Expand the operands. */
5172 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5174 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5175 expect
= convert_memory_address (Pmode
, expect
);
5176 expect
= gen_rtx_MEM (mode
, expect
);
5177 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5179 weak
= CALL_EXPR_ARG (exp
, 3);
5181 if (host_integerp (weak
, 0) && tree_low_cst (weak
, 0) != 0)
5185 if (!expand_atomic_compare_and_swap ((target
== const0_rtx
? NULL
: &target
),
5186 &oldval
, mem
, oldval
, desired
,
5187 is_weak
, success
, failure
))
5190 if (oldval
!= expect
)
5191 emit_move_insn (expect
, oldval
);
5196 /* Expand the __atomic_load intrinsic:
5197 TYPE __atomic_load (TYPE *object, enum memmodel)
5198 EXP is the CALL_EXPR.
5199 TARGET is an optional place for us to store the results. */
5202 expand_builtin_atomic_load (enum machine_mode mode
, tree exp
, rtx target
)
5205 enum memmodel model
;
5207 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5208 if ((model
& MEMMODEL_MASK
) == MEMMODEL_RELEASE
5209 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5211 error ("invalid memory model for %<__atomic_load%>");
5215 if (!flag_inline_atomics
)
5218 /* Expand the operand. */
5219 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5221 return expand_atomic_load (target
, mem
, model
);
5225 /* Expand the __atomic_store intrinsic:
5226 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5227 EXP is the CALL_EXPR.
5228 TARGET is an optional place for us to store the results. */
5231 expand_builtin_atomic_store (enum machine_mode mode
, tree exp
)
5234 enum memmodel model
;
5236 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5237 if ((model
& MEMMODEL_MASK
) != MEMMODEL_RELAXED
5238 && (model
& MEMMODEL_MASK
) != MEMMODEL_SEQ_CST
5239 && (model
& MEMMODEL_MASK
) != MEMMODEL_RELEASE
)
5241 error ("invalid memory model for %<__atomic_store%>");
5245 if (!flag_inline_atomics
)
5248 /* Expand the operands. */
5249 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5250 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5252 return expand_atomic_store (mem
, val
, model
, false);
5255 /* Expand the __atomic_fetch_XXX intrinsic:
5256 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5257 EXP is the CALL_EXPR.
5258 TARGET is an optional place for us to store the results.
5259 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5260 FETCH_AFTER is true if returning the result of the operation.
5261 FETCH_AFTER is false if returning the value before the operation.
5262 IGNORE is true if the result is not used.
5263 EXT_CALL is the correct builtin for an external call if this cannot be
5264 resolved to an instruction sequence. */
5267 expand_builtin_atomic_fetch_op (enum machine_mode mode
, tree exp
, rtx target
,
5268 enum rtx_code code
, bool fetch_after
,
5269 bool ignore
, enum built_in_function ext_call
)
5272 enum memmodel model
;
5276 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5278 /* Expand the operands. */
5279 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5280 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5282 /* Only try generating instructions if inlining is turned on. */
5283 if (flag_inline_atomics
)
5285 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5290 /* Return if a different routine isn't needed for the library call. */
5291 if (ext_call
== BUILT_IN_NONE
)
5294 /* Change the call to the specified function. */
5295 fndecl
= get_callee_fndecl (exp
);
5296 addr
= CALL_EXPR_FN (exp
);
5299 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5300 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5302 /* Expand the call here so we can emit trailing code. */
5303 ret
= expand_call (exp
, target
, ignore
);
5305 /* Replace the original function just in case it matters. */
5306 TREE_OPERAND (addr
, 0) = fndecl
;
5308 /* Then issue the arithmetic correction to return the right result. */
5313 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5315 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5318 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5325 #ifndef HAVE_atomic_clear
5326 # define HAVE_atomic_clear 0
5327 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5330 /* Expand an atomic clear operation.
5331 void _atomic_clear (BOOL *obj, enum memmodel)
5332 EXP is the call expression. */
5335 expand_builtin_atomic_clear (tree exp
)
5337 enum machine_mode mode
;
5339 enum memmodel model
;
5341 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5342 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5343 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5345 if ((model
& MEMMODEL_MASK
) == MEMMODEL_ACQUIRE
5346 || (model
& MEMMODEL_MASK
) == MEMMODEL_ACQ_REL
)
5348 error ("invalid memory model for %<__atomic_store%>");
5352 if (HAVE_atomic_clear
)
5354 emit_insn (gen_atomic_clear (mem
, model
));
5358 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5359 Failing that, a store is issued by __atomic_store. The only way this can
5360 fail is if the bool type is larger than a word size. Unlikely, but
5361 handle it anyway for completeness. Assume a single threaded model since
5362 there is no atomic support in this case, and no barriers are required. */
5363 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5365 emit_move_insn (mem
, const0_rtx
);
5369 /* Expand an atomic test_and_set operation.
5370 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5371 EXP is the call expression. */
5374 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5377 enum memmodel model
;
5378 enum machine_mode mode
;
5380 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5381 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5382 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5384 return expand_atomic_test_and_set (target
, mem
, model
);
5388 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5389 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5392 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5395 enum machine_mode mode
;
5396 unsigned int mode_align
, type_align
;
5398 if (TREE_CODE (arg0
) != INTEGER_CST
)
5401 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5402 mode
= mode_for_size (size
, MODE_INT
, 0);
5403 mode_align
= GET_MODE_ALIGNMENT (mode
);
5405 if (TREE_CODE (arg1
) == INTEGER_CST
&& INTVAL (expand_normal (arg1
)) == 0)
5406 type_align
= mode_align
;
5409 tree ttype
= TREE_TYPE (arg1
);
5411 /* This function is usually invoked and folded immediately by the front
5412 end before anything else has a chance to look at it. The pointer
5413 parameter at this point is usually cast to a void *, so check for that
5414 and look past the cast. */
5415 if (TREE_CODE (arg1
) == NOP_EXPR
&& POINTER_TYPE_P (ttype
)
5416 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5417 arg1
= TREE_OPERAND (arg1
, 0);
5419 ttype
= TREE_TYPE (arg1
);
5420 gcc_assert (POINTER_TYPE_P (ttype
));
5422 /* Get the underlying type of the object. */
5423 ttype
= TREE_TYPE (ttype
);
5424 type_align
= TYPE_ALIGN (ttype
);
5427 /* If the object has smaller alignment, the the lock free routines cannot
5429 if (type_align
< mode_align
)
5430 return boolean_false_node
;
5432 /* Check if a compare_and_swap pattern exists for the mode which represents
5433 the required size. The pattern is not allowed to fail, so the existence
5434 of the pattern indicates support is present. */
5435 if (can_compare_and_swap_p (mode
, true))
5436 return boolean_true_node
;
5438 return boolean_false_node
;
5441 /* Return true if the parameters to call EXP represent an object which will
5442 always generate lock free instructions. The first argument represents the
5443 size of the object, and the second parameter is a pointer to the object
5444 itself. If NULL is passed for the object, then the result is based on
5445 typical alignment for an object of the specified size. Otherwise return
5449 expand_builtin_atomic_always_lock_free (tree exp
)
5452 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5453 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5455 if (TREE_CODE (arg0
) != INTEGER_CST
)
5457 error ("non-constant argument 1 to __atomic_always_lock_free");
5461 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5462 if (size
== boolean_true_node
)
5467 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5468 is lock free on this architecture. */
5471 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5473 if (!flag_inline_atomics
)
5476 /* If it isn't always lock free, don't generate a result. */
5477 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5478 return boolean_true_node
;
5483 /* Return true if the parameters to call EXP represent an object which will
5484 always generate lock free instructions. The first argument represents the
5485 size of the object, and the second parameter is a pointer to the object
5486 itself. If NULL is passed for the object, then the result is based on
5487 typical alignment for an object of the specified size. Otherwise return
5491 expand_builtin_atomic_is_lock_free (tree exp
)
5494 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5495 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5497 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5499 error ("non-integer argument 1 to __atomic_is_lock_free");
5503 if (!flag_inline_atomics
)
5506 /* If the value is known at compile time, return the RTX for it. */
5507 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5508 if (size
== boolean_true_node
)
5514 /* Expand the __atomic_thread_fence intrinsic:
5515 void __atomic_thread_fence (enum memmodel)
5516 EXP is the CALL_EXPR. */
5519 expand_builtin_atomic_thread_fence (tree exp
)
5521 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5522 expand_mem_thread_fence (model
);
5525 /* Expand the __atomic_signal_fence intrinsic:
5526 void __atomic_signal_fence (enum memmodel)
5527 EXP is the CALL_EXPR. */
5530 expand_builtin_atomic_signal_fence (tree exp
)
5532 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5533 expand_mem_signal_fence (model
);
5536 /* Expand the __sync_synchronize intrinsic. */
5539 expand_builtin_sync_synchronize (void)
5541 expand_mem_thread_fence (MEMMODEL_SEQ_CST
);
5545 expand_builtin_thread_pointer (tree exp
, rtx target
)
5547 enum insn_code icode
;
5548 if (!validate_arglist (exp
, VOID_TYPE
))
5550 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5551 if (icode
!= CODE_FOR_nothing
)
5553 struct expand_operand op
;
5554 if (!REG_P (target
) || GET_MODE (target
) != Pmode
)
5555 target
= gen_reg_rtx (Pmode
);
5556 create_output_operand (&op
, target
, Pmode
);
5557 expand_insn (icode
, 1, &op
);
5560 error ("__builtin_thread_pointer is not supported on this target");
5565 expand_builtin_set_thread_pointer (tree exp
)
5567 enum insn_code icode
;
5568 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5570 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5571 if (icode
!= CODE_FOR_nothing
)
5573 struct expand_operand op
;
5574 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5575 Pmode
, EXPAND_NORMAL
);
5576 create_input_operand (&op
, val
, Pmode
);
5577 expand_insn (icode
, 1, &op
);
5580 error ("__builtin_set_thread_pointer is not supported on this target");
5584 /* Expand an expression EXP that calls a built-in function,
5585 with result going to TARGET if that's convenient
5586 (and in mode MODE if that's convenient).
5587 SUBTARGET may be used as the target for computing one of EXP's operands.
5588 IGNORE is nonzero if the value is to be ignored. */
5591 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5594 tree fndecl
= get_callee_fndecl (exp
);
5595 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5596 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5599 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5600 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5602 /* When not optimizing, generate calls to library functions for a certain
5605 && !called_as_built_in (fndecl
)
5606 && fcode
!= BUILT_IN_FORK
5607 && fcode
!= BUILT_IN_EXECL
5608 && fcode
!= BUILT_IN_EXECV
5609 && fcode
!= BUILT_IN_EXECLP
5610 && fcode
!= BUILT_IN_EXECLE
5611 && fcode
!= BUILT_IN_EXECVP
5612 && fcode
!= BUILT_IN_EXECVE
5613 && fcode
!= BUILT_IN_ALLOCA
5614 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5615 && fcode
!= BUILT_IN_FREE
5616 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
5617 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
5618 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
5619 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
5620 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5621 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
5622 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5623 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5624 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5625 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
5626 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
)
5627 return expand_call (exp
, target
, ignore
);
5629 /* The built-in function expanders test for target == const0_rtx
5630 to determine whether the function's result will be ignored. */
5632 target
= const0_rtx
;
5634 /* If the result of a pure or const built-in function is ignored, and
5635 none of its arguments are volatile, we can avoid expanding the
5636 built-in call and just evaluate the arguments for side-effects. */
5637 if (target
== const0_rtx
5638 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5639 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5641 bool volatilep
= false;
5643 call_expr_arg_iterator iter
;
5645 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5646 if (TREE_THIS_VOLATILE (arg
))
5654 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5655 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5662 CASE_FLT_FN (BUILT_IN_FABS
):
5663 case BUILT_IN_FABSD32
:
5664 case BUILT_IN_FABSD64
:
5665 case BUILT_IN_FABSD128
:
5666 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5671 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5672 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5677 /* Just do a normal library call if we were unable to fold
5679 CASE_FLT_FN (BUILT_IN_CABS
):
5682 CASE_FLT_FN (BUILT_IN_EXP
):
5683 CASE_FLT_FN (BUILT_IN_EXP10
):
5684 CASE_FLT_FN (BUILT_IN_POW10
):
5685 CASE_FLT_FN (BUILT_IN_EXP2
):
5686 CASE_FLT_FN (BUILT_IN_EXPM1
):
5687 CASE_FLT_FN (BUILT_IN_LOGB
):
5688 CASE_FLT_FN (BUILT_IN_LOG
):
5689 CASE_FLT_FN (BUILT_IN_LOG10
):
5690 CASE_FLT_FN (BUILT_IN_LOG2
):
5691 CASE_FLT_FN (BUILT_IN_LOG1P
):
5692 CASE_FLT_FN (BUILT_IN_TAN
):
5693 CASE_FLT_FN (BUILT_IN_ASIN
):
5694 CASE_FLT_FN (BUILT_IN_ACOS
):
5695 CASE_FLT_FN (BUILT_IN_ATAN
):
5696 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5697 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5698 because of possible accuracy problems. */
5699 if (! flag_unsafe_math_optimizations
)
5701 CASE_FLT_FN (BUILT_IN_SQRT
):
5702 CASE_FLT_FN (BUILT_IN_FLOOR
):
5703 CASE_FLT_FN (BUILT_IN_CEIL
):
5704 CASE_FLT_FN (BUILT_IN_TRUNC
):
5705 CASE_FLT_FN (BUILT_IN_ROUND
):
5706 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5707 CASE_FLT_FN (BUILT_IN_RINT
):
5708 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5713 CASE_FLT_FN (BUILT_IN_FMA
):
5714 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5719 CASE_FLT_FN (BUILT_IN_ILOGB
):
5720 if (! flag_unsafe_math_optimizations
)
5722 CASE_FLT_FN (BUILT_IN_ISINF
):
5723 CASE_FLT_FN (BUILT_IN_FINITE
):
5724 case BUILT_IN_ISFINITE
:
5725 case BUILT_IN_ISNORMAL
:
5726 target
= expand_builtin_interclass_mathfn (exp
, target
);
5731 CASE_FLT_FN (BUILT_IN_ICEIL
):
5732 CASE_FLT_FN (BUILT_IN_LCEIL
):
5733 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5734 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5735 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5736 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5737 target
= expand_builtin_int_roundingfn (exp
, target
);
5742 CASE_FLT_FN (BUILT_IN_IRINT
):
5743 CASE_FLT_FN (BUILT_IN_LRINT
):
5744 CASE_FLT_FN (BUILT_IN_LLRINT
):
5745 CASE_FLT_FN (BUILT_IN_IROUND
):
5746 CASE_FLT_FN (BUILT_IN_LROUND
):
5747 CASE_FLT_FN (BUILT_IN_LLROUND
):
5748 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5753 CASE_FLT_FN (BUILT_IN_POWI
):
5754 target
= expand_builtin_powi (exp
, target
);
5759 CASE_FLT_FN (BUILT_IN_ATAN2
):
5760 CASE_FLT_FN (BUILT_IN_LDEXP
):
5761 CASE_FLT_FN (BUILT_IN_SCALB
):
5762 CASE_FLT_FN (BUILT_IN_SCALBN
):
5763 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5764 if (! flag_unsafe_math_optimizations
)
5767 CASE_FLT_FN (BUILT_IN_FMOD
):
5768 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5769 CASE_FLT_FN (BUILT_IN_DREM
):
5770 CASE_FLT_FN (BUILT_IN_POW
):
5771 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5776 CASE_FLT_FN (BUILT_IN_CEXPI
):
5777 target
= expand_builtin_cexpi (exp
, target
);
5778 gcc_assert (target
);
5781 CASE_FLT_FN (BUILT_IN_SIN
):
5782 CASE_FLT_FN (BUILT_IN_COS
):
5783 if (! flag_unsafe_math_optimizations
)
5785 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5790 CASE_FLT_FN (BUILT_IN_SINCOS
):
5791 if (! flag_unsafe_math_optimizations
)
5793 target
= expand_builtin_sincos (exp
);
5798 case BUILT_IN_APPLY_ARGS
:
5799 return expand_builtin_apply_args ();
5801 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5802 FUNCTION with a copy of the parameters described by
5803 ARGUMENTS, and ARGSIZE. It returns a block of memory
5804 allocated on the stack into which is stored all the registers
5805 that might possibly be used for returning the result of a
5806 function. ARGUMENTS is the value returned by
5807 __builtin_apply_args. ARGSIZE is the number of bytes of
5808 arguments that must be copied. ??? How should this value be
5809 computed? We'll also need a safe worst case value for varargs
5811 case BUILT_IN_APPLY
:
5812 if (!validate_arglist (exp
, POINTER_TYPE
,
5813 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5814 && !validate_arglist (exp
, REFERENCE_TYPE
,
5815 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5821 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
5822 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
5823 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
5825 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
5828 /* __builtin_return (RESULT) causes the function to return the
5829 value described by RESULT. RESULT is address of the block of
5830 memory returned by __builtin_apply. */
5831 case BUILT_IN_RETURN
:
5832 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5833 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
5836 case BUILT_IN_SAVEREGS
:
5837 return expand_builtin_saveregs ();
5839 case BUILT_IN_VA_ARG_PACK
:
5840 /* All valid uses of __builtin_va_arg_pack () are removed during
5842 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
5845 case BUILT_IN_VA_ARG_PACK_LEN
:
5846 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5848 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
5851 /* Return the address of the first anonymous stack arg. */
5852 case BUILT_IN_NEXT_ARG
:
5853 if (fold_builtin_next_arg (exp
, false))
5855 return expand_builtin_next_arg ();
5857 case BUILT_IN_CLEAR_CACHE
:
5858 target
= expand_builtin___clear_cache (exp
);
5863 case BUILT_IN_CLASSIFY_TYPE
:
5864 return expand_builtin_classify_type (exp
);
5866 case BUILT_IN_CONSTANT_P
:
5869 case BUILT_IN_FRAME_ADDRESS
:
5870 case BUILT_IN_RETURN_ADDRESS
:
5871 return expand_builtin_frame_address (fndecl
, exp
);
5873 /* Returns the address of the area where the structure is returned.
5875 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
5876 if (call_expr_nargs (exp
) != 0
5877 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
5878 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
5881 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5883 case BUILT_IN_ALLOCA
:
5884 case BUILT_IN_ALLOCA_WITH_ALIGN
:
5885 /* If the allocation stems from the declaration of a variable-sized
5886 object, it cannot accumulate. */
5887 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
5892 case BUILT_IN_STACK_SAVE
:
5893 return expand_stack_save ();
5895 case BUILT_IN_STACK_RESTORE
:
5896 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
5899 case BUILT_IN_BSWAP16
:
5900 case BUILT_IN_BSWAP32
:
5901 case BUILT_IN_BSWAP64
:
5902 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
5907 CASE_INT_FN (BUILT_IN_FFS
):
5908 target
= expand_builtin_unop (target_mode
, exp
, target
,
5909 subtarget
, ffs_optab
);
5914 CASE_INT_FN (BUILT_IN_CLZ
):
5915 target
= expand_builtin_unop (target_mode
, exp
, target
,
5916 subtarget
, clz_optab
);
5921 CASE_INT_FN (BUILT_IN_CTZ
):
5922 target
= expand_builtin_unop (target_mode
, exp
, target
,
5923 subtarget
, ctz_optab
);
5928 CASE_INT_FN (BUILT_IN_CLRSB
):
5929 target
= expand_builtin_unop (target_mode
, exp
, target
,
5930 subtarget
, clrsb_optab
);
5935 CASE_INT_FN (BUILT_IN_POPCOUNT
):
5936 target
= expand_builtin_unop (target_mode
, exp
, target
,
5937 subtarget
, popcount_optab
);
5942 CASE_INT_FN (BUILT_IN_PARITY
):
5943 target
= expand_builtin_unop (target_mode
, exp
, target
,
5944 subtarget
, parity_optab
);
5949 case BUILT_IN_STRLEN
:
5950 target
= expand_builtin_strlen (exp
, target
, target_mode
);
5955 case BUILT_IN_STRCPY
:
5956 target
= expand_builtin_strcpy (exp
, target
);
5961 case BUILT_IN_STRNCPY
:
5962 target
= expand_builtin_strncpy (exp
, target
);
5967 case BUILT_IN_STPCPY
:
5968 target
= expand_builtin_stpcpy (exp
, target
, mode
);
5973 case BUILT_IN_MEMCPY
:
5974 target
= expand_builtin_memcpy (exp
, target
);
5979 case BUILT_IN_MEMPCPY
:
5980 target
= expand_builtin_mempcpy (exp
, target
, mode
);
5985 case BUILT_IN_MEMSET
:
5986 target
= expand_builtin_memset (exp
, target
, mode
);
5991 case BUILT_IN_BZERO
:
5992 target
= expand_builtin_bzero (exp
);
5997 case BUILT_IN_STRCMP
:
5998 target
= expand_builtin_strcmp (exp
, target
);
6003 case BUILT_IN_STRNCMP
:
6004 target
= expand_builtin_strncmp (exp
, target
, mode
);
6010 case BUILT_IN_MEMCMP
:
6011 target
= expand_builtin_memcmp (exp
, target
, mode
);
6016 case BUILT_IN_SETJMP
:
6017 /* This should have been lowered to the builtins below. */
6020 case BUILT_IN_SETJMP_SETUP
:
6021 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6022 and the receiver label. */
6023 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6025 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6026 VOIDmode
, EXPAND_NORMAL
);
6027 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6028 rtx label_r
= label_rtx (label
);
6030 /* This is copied from the handling of non-local gotos. */
6031 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6032 nonlocal_goto_handler_labels
6033 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6034 nonlocal_goto_handler_labels
);
6035 /* ??? Do not let expand_label treat us as such since we would
6036 not want to be both on the list of non-local labels and on
6037 the list of forced labels. */
6038 FORCED_LABEL (label
) = 0;
6043 case BUILT_IN_SETJMP_DISPATCHER
:
6044 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6045 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6047 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6048 rtx label_r
= label_rtx (label
);
6050 /* Remove the dispatcher label from the list of non-local labels
6051 since the receiver labels have been added to it above. */
6052 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6057 case BUILT_IN_SETJMP_RECEIVER
:
6058 /* __builtin_setjmp_receiver is passed the receiver label. */
6059 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6061 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6062 rtx label_r
= label_rtx (label
);
6064 expand_builtin_setjmp_receiver (label_r
);
6069 /* __builtin_longjmp is passed a pointer to an array of five words.
6070 It's similar to the C library longjmp function but works with
6071 __builtin_setjmp above. */
6072 case BUILT_IN_LONGJMP
:
6073 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6075 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6076 VOIDmode
, EXPAND_NORMAL
);
6077 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6079 if (value
!= const1_rtx
)
6081 error ("%<__builtin_longjmp%> second argument must be 1");
6085 expand_builtin_longjmp (buf_addr
, value
);
6090 case BUILT_IN_NONLOCAL_GOTO
:
6091 target
= expand_builtin_nonlocal_goto (exp
);
6096 /* This updates the setjmp buffer that is its argument with the value
6097 of the current stack pointer. */
6098 case BUILT_IN_UPDATE_SETJMP_BUF
:
6099 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6102 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6104 expand_builtin_update_setjmp_buf (buf_addr
);
6110 expand_builtin_trap ();
6113 case BUILT_IN_UNREACHABLE
:
6114 expand_builtin_unreachable ();
6117 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6118 case BUILT_IN_SIGNBITD32
:
6119 case BUILT_IN_SIGNBITD64
:
6120 case BUILT_IN_SIGNBITD128
:
6121 target
= expand_builtin_signbit (exp
, target
);
6126 /* Various hooks for the DWARF 2 __throw routine. */
6127 case BUILT_IN_UNWIND_INIT
:
6128 expand_builtin_unwind_init ();
6130 case BUILT_IN_DWARF_CFA
:
6131 return virtual_cfa_rtx
;
6132 #ifdef DWARF2_UNWIND_INFO
6133 case BUILT_IN_DWARF_SP_COLUMN
:
6134 return expand_builtin_dwarf_sp_column ();
6135 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6136 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6139 case BUILT_IN_FROB_RETURN_ADDR
:
6140 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6141 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6142 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6143 case BUILT_IN_EH_RETURN
:
6144 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6145 CALL_EXPR_ARG (exp
, 1));
6147 #ifdef EH_RETURN_DATA_REGNO
6148 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6149 return expand_builtin_eh_return_data_regno (exp
);
6151 case BUILT_IN_EXTEND_POINTER
:
6152 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6153 case BUILT_IN_EH_POINTER
:
6154 return expand_builtin_eh_pointer (exp
);
6155 case BUILT_IN_EH_FILTER
:
6156 return expand_builtin_eh_filter (exp
);
6157 case BUILT_IN_EH_COPY_VALUES
:
6158 return expand_builtin_eh_copy_values (exp
);
6160 case BUILT_IN_VA_START
:
6161 return expand_builtin_va_start (exp
);
6162 case BUILT_IN_VA_END
:
6163 return expand_builtin_va_end (exp
);
6164 case BUILT_IN_VA_COPY
:
6165 return expand_builtin_va_copy (exp
);
6166 case BUILT_IN_EXPECT
:
6167 return expand_builtin_expect (exp
, target
);
6168 case BUILT_IN_ASSUME_ALIGNED
:
6169 return expand_builtin_assume_aligned (exp
, target
);
6170 case BUILT_IN_PREFETCH
:
6171 expand_builtin_prefetch (exp
);
6174 case BUILT_IN_INIT_TRAMPOLINE
:
6175 return expand_builtin_init_trampoline (exp
, true);
6176 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6177 return expand_builtin_init_trampoline (exp
, false);
6178 case BUILT_IN_ADJUST_TRAMPOLINE
:
6179 return expand_builtin_adjust_trampoline (exp
);
6182 case BUILT_IN_EXECL
:
6183 case BUILT_IN_EXECV
:
6184 case BUILT_IN_EXECLP
:
6185 case BUILT_IN_EXECLE
:
6186 case BUILT_IN_EXECVP
:
6187 case BUILT_IN_EXECVE
:
6188 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6193 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6194 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6195 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6196 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6197 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6198 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6199 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6204 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6205 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6206 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6207 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6208 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6209 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6210 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6215 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6216 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6217 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6218 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6219 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6220 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6221 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6226 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6227 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6228 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6229 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6230 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6231 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6232 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6237 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6238 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6239 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6240 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6241 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6242 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6243 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6248 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6249 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6250 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6251 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6252 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6253 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6254 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6259 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6260 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6261 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6262 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6263 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6264 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6265 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6270 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6271 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6272 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6273 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6274 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6275 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6276 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6281 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6282 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6283 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6284 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6285 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6286 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6287 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6292 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6293 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6294 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6295 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6296 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6297 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6298 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6303 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6304 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6305 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6306 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6307 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6308 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6309 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6314 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6315 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6316 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6317 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6318 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6319 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6320 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6325 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6326 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6327 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6328 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6329 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6330 if (mode
== VOIDmode
)
6331 mode
= TYPE_MODE (boolean_type_node
);
6332 if (!target
|| !register_operand (target
, mode
))
6333 target
= gen_reg_rtx (mode
);
6335 mode
= get_builtin_sync_mode
6336 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6337 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6342 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6343 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6344 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6345 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6346 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6347 mode
= get_builtin_sync_mode
6348 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6349 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6354 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6355 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6356 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6357 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6358 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6359 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6360 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6365 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6366 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6367 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6368 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6369 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6370 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6371 expand_builtin_sync_lock_release (mode
, exp
);
6374 case BUILT_IN_SYNC_SYNCHRONIZE
:
6375 expand_builtin_sync_synchronize ();
6378 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6379 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6380 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6381 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6382 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6383 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6384 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6389 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6390 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6391 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6392 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6393 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6395 unsigned int nargs
, z
;
6396 vec
<tree
, va_gc
> *vec
;
6399 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6400 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6404 /* If this is turned into an external library call, the weak parameter
6405 must be dropped to match the expected parameter list. */
6406 nargs
= call_expr_nargs (exp
);
6407 vec_alloc (vec
, nargs
- 1);
6408 for (z
= 0; z
< 3; z
++)
6409 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6410 /* Skip the boolean weak parameter. */
6411 for (z
= 4; z
< 6; z
++)
6412 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6413 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6417 case BUILT_IN_ATOMIC_LOAD_1
:
6418 case BUILT_IN_ATOMIC_LOAD_2
:
6419 case BUILT_IN_ATOMIC_LOAD_4
:
6420 case BUILT_IN_ATOMIC_LOAD_8
:
6421 case BUILT_IN_ATOMIC_LOAD_16
:
6422 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6423 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6428 case BUILT_IN_ATOMIC_STORE_1
:
6429 case BUILT_IN_ATOMIC_STORE_2
:
6430 case BUILT_IN_ATOMIC_STORE_4
:
6431 case BUILT_IN_ATOMIC_STORE_8
:
6432 case BUILT_IN_ATOMIC_STORE_16
:
6433 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6434 target
= expand_builtin_atomic_store (mode
, exp
);
6439 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6440 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6441 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6442 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6443 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6445 enum built_in_function lib
;
6446 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6447 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6448 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6449 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6455 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6456 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6457 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6458 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6459 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6461 enum built_in_function lib
;
6462 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6463 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6464 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6465 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6471 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6472 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6473 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6474 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6475 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6477 enum built_in_function lib
;
6478 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6479 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6480 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6481 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6487 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6488 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6489 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6490 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6491 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6493 enum built_in_function lib
;
6494 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6495 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6496 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6497 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6503 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6504 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6505 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6506 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6507 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6509 enum built_in_function lib
;
6510 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6511 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6512 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6513 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6519 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6520 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6521 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6522 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6523 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6525 enum built_in_function lib
;
6526 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6527 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6528 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6529 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6535 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6536 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6537 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6538 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6539 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6540 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6541 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6542 ignore
, BUILT_IN_NONE
);
6547 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6548 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6549 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6550 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6551 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6552 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6553 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6554 ignore
, BUILT_IN_NONE
);
6559 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6560 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6561 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6562 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6563 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6564 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6565 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6566 ignore
, BUILT_IN_NONE
);
6571 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6572 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6573 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6574 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6575 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6576 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6577 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6578 ignore
, BUILT_IN_NONE
);
6583 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6584 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6585 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6586 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6587 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6588 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6589 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6590 ignore
, BUILT_IN_NONE
);
6595 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6596 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6597 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6598 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6599 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6600 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6601 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6602 ignore
, BUILT_IN_NONE
);
6607 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6608 return expand_builtin_atomic_test_and_set (exp
, target
);
6610 case BUILT_IN_ATOMIC_CLEAR
:
6611 return expand_builtin_atomic_clear (exp
);
6613 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6614 return expand_builtin_atomic_always_lock_free (exp
);
6616 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6617 target
= expand_builtin_atomic_is_lock_free (exp
);
6622 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6623 expand_builtin_atomic_thread_fence (exp
);
6626 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6627 expand_builtin_atomic_signal_fence (exp
);
6630 case BUILT_IN_OBJECT_SIZE
:
6631 return expand_builtin_object_size (exp
);
6633 case BUILT_IN_MEMCPY_CHK
:
6634 case BUILT_IN_MEMPCPY_CHK
:
6635 case BUILT_IN_MEMMOVE_CHK
:
6636 case BUILT_IN_MEMSET_CHK
:
6637 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6642 case BUILT_IN_STRCPY_CHK
:
6643 case BUILT_IN_STPCPY_CHK
:
6644 case BUILT_IN_STRNCPY_CHK
:
6645 case BUILT_IN_STPNCPY_CHK
:
6646 case BUILT_IN_STRCAT_CHK
:
6647 case BUILT_IN_STRNCAT_CHK
:
6648 case BUILT_IN_SNPRINTF_CHK
:
6649 case BUILT_IN_VSNPRINTF_CHK
:
6650 maybe_emit_chk_warning (exp
, fcode
);
6653 case BUILT_IN_SPRINTF_CHK
:
6654 case BUILT_IN_VSPRINTF_CHK
:
6655 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6659 if (warn_free_nonheap_object
)
6660 maybe_emit_free_warning (exp
);
6663 case BUILT_IN_THREAD_POINTER
:
6664 return expand_builtin_thread_pointer (exp
, target
);
6666 case BUILT_IN_SET_THREAD_POINTER
:
6667 expand_builtin_set_thread_pointer (exp
);
6670 case BUILT_IN_CILK_DETACH
:
6671 expand_builtin_cilk_detach (exp
);
6674 case BUILT_IN_CILK_POP_FRAME
:
6675 expand_builtin_cilk_pop_frame (exp
);
6678 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6679 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6680 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6681 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6683 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6684 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6685 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6686 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6687 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6688 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6689 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6690 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6691 /* We allow user CHKP builtins if Pointer Bounds
6693 if (!flag_check_pointer_bounds
)
6695 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6696 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
6697 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6698 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6699 return expand_normal (size_zero_node
);
6700 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6701 return expand_normal (size_int (-1));
6707 case BUILT_IN_CHKP_BNDMK
:
6708 case BUILT_IN_CHKP_BNDSTX
:
6709 case BUILT_IN_CHKP_BNDCL
:
6710 case BUILT_IN_CHKP_BNDCU
:
6711 case BUILT_IN_CHKP_BNDLDX
:
6712 case BUILT_IN_CHKP_BNDRET
:
6713 case BUILT_IN_CHKP_INTERSECT
:
6714 case BUILT_IN_CHKP_ARG_BND
:
6715 case BUILT_IN_CHKP_NARROW
:
6716 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6717 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6718 /* Software implementation of pointers checker is NYI.
6719 Target support is required. */
6720 error ("Your target platform does not support -fcheck-pointers");
6723 default: /* just do library call, if unknown builtin */
6727 /* The switch statement above can drop through to cause the function
6728 to be called normally. */
6729 return expand_call (exp
, target
, ignore
);
6732 /* Determine whether a tree node represents a call to a built-in
6733 function. If the tree T is a call to a built-in function with
6734 the right number of arguments of the appropriate types, return
6735 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6736 Otherwise the return value is END_BUILTINS. */
6738 enum built_in_function
6739 builtin_mathfn_code (const_tree t
)
6741 const_tree fndecl
, arg
, parmlist
;
6742 const_tree argtype
, parmtype
;
6743 const_call_expr_arg_iterator iter
;
6745 if (TREE_CODE (t
) != CALL_EXPR
6746 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6747 return END_BUILTINS
;
6749 fndecl
= get_callee_fndecl (t
);
6750 if (fndecl
== NULL_TREE
6751 || TREE_CODE (fndecl
) != FUNCTION_DECL
6752 || ! DECL_BUILT_IN (fndecl
)
6753 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6754 return END_BUILTINS
;
6756 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6757 init_const_call_expr_arg_iterator (t
, &iter
);
6758 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6760 /* If a function doesn't take a variable number of arguments,
6761 the last element in the list will have type `void'. */
6762 parmtype
= TREE_VALUE (parmlist
);
6763 if (VOID_TYPE_P (parmtype
))
6765 if (more_const_call_expr_args_p (&iter
))
6766 return END_BUILTINS
;
6767 return DECL_FUNCTION_CODE (fndecl
);
6770 if (! more_const_call_expr_args_p (&iter
))
6771 return END_BUILTINS
;
6773 arg
= next_const_call_expr_arg (&iter
);
6774 argtype
= TREE_TYPE (arg
);
6776 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6778 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6779 return END_BUILTINS
;
6781 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6783 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6784 return END_BUILTINS
;
6786 else if (POINTER_TYPE_P (parmtype
))
6788 if (! POINTER_TYPE_P (argtype
))
6789 return END_BUILTINS
;
6791 else if (INTEGRAL_TYPE_P (parmtype
))
6793 if (! INTEGRAL_TYPE_P (argtype
))
6794 return END_BUILTINS
;
6797 return END_BUILTINS
;
6800 /* Variable-length argument list. */
6801 return DECL_FUNCTION_CODE (fndecl
);
6804 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6805 evaluate to a constant. */
6808 fold_builtin_constant_p (tree arg
)
6810 /* We return 1 for a numeric type that's known to be a constant
6811 value at compile-time or for an aggregate type that's a
6812 literal constant. */
6815 /* If we know this is a constant, emit the constant of one. */
6816 if (CONSTANT_CLASS_P (arg
)
6817 || (TREE_CODE (arg
) == CONSTRUCTOR
6818 && TREE_CONSTANT (arg
)))
6819 return integer_one_node
;
6820 if (TREE_CODE (arg
) == ADDR_EXPR
)
6822 tree op
= TREE_OPERAND (arg
, 0);
6823 if (TREE_CODE (op
) == STRING_CST
6824 || (TREE_CODE (op
) == ARRAY_REF
6825 && integer_zerop (TREE_OPERAND (op
, 1))
6826 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
6827 return integer_one_node
;
6830 /* If this expression has side effects, show we don't know it to be a
6831 constant. Likewise if it's a pointer or aggregate type since in
6832 those case we only want literals, since those are only optimized
6833 when generating RTL, not later.
6834 And finally, if we are compiling an initializer, not code, we
6835 need to return a definite result now; there's not going to be any
6836 more optimization done. */
6837 if (TREE_SIDE_EFFECTS (arg
)
6838 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
6839 || POINTER_TYPE_P (TREE_TYPE (arg
))
6841 || folding_initializer
6842 || force_folding_builtin_constant_p
)
6843 return integer_zero_node
;
6848 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6849 return it as a truthvalue. */
6852 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
)
6854 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
6856 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
6857 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
6858 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
6859 pred_type
= TREE_VALUE (arg_types
);
6860 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
6862 pred
= fold_convert_loc (loc
, pred_type
, pred
);
6863 expected
= fold_convert_loc (loc
, expected_type
, expected
);
6864 call_expr
= build_call_expr_loc (loc
, fn
, 2, pred
, expected
);
6866 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
6867 build_int_cst (ret_type
, 0));
6870 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6871 NULL_TREE if no simplification is possible. */
6874 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
)
6876 tree inner
, fndecl
, inner_arg0
;
6877 enum tree_code code
;
6879 /* Distribute the expected value over short-circuiting operators.
6880 See through the cast from truthvalue_type_node to long. */
6882 while (TREE_CODE (inner_arg0
) == NOP_EXPR
6883 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
6884 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
6885 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
6887 /* If this is a builtin_expect within a builtin_expect keep the
6888 inner one. See through a comparison against a constant. It
6889 might have been added to create a thruthvalue. */
6892 if (COMPARISON_CLASS_P (inner
)
6893 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
6894 inner
= TREE_OPERAND (inner
, 0);
6896 if (TREE_CODE (inner
) == CALL_EXPR
6897 && (fndecl
= get_callee_fndecl (inner
))
6898 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
6899 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
6903 code
= TREE_CODE (inner
);
6904 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6906 tree op0
= TREE_OPERAND (inner
, 0);
6907 tree op1
= TREE_OPERAND (inner
, 1);
6909 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
);
6910 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
);
6911 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
6913 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
6916 /* If the argument isn't invariant then there's nothing else we can do. */
6917 if (!TREE_CONSTANT (inner_arg0
))
6920 /* If we expect that a comparison against the argument will fold to
6921 a constant return the constant. In practice, this means a true
6922 constant or the address of a non-weak symbol. */
6925 if (TREE_CODE (inner
) == ADDR_EXPR
)
6929 inner
= TREE_OPERAND (inner
, 0);
6931 while (TREE_CODE (inner
) == COMPONENT_REF
6932 || TREE_CODE (inner
) == ARRAY_REF
);
6933 if ((TREE_CODE (inner
) == VAR_DECL
6934 || TREE_CODE (inner
) == FUNCTION_DECL
)
6935 && DECL_WEAK (inner
))
6939 /* Otherwise, ARG0 already has the proper type for the return value. */
6943 /* Fold a call to __builtin_classify_type with argument ARG. */
6946 fold_builtin_classify_type (tree arg
)
6949 return build_int_cst (integer_type_node
, no_type_class
);
6951 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
6954 /* Fold a call to __builtin_strlen with argument ARG. */
6957 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
6959 if (!validate_arg (arg
, POINTER_TYPE
))
6963 tree len
= c_strlen (arg
, 0);
6966 return fold_convert_loc (loc
, type
, len
);
6972 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6975 fold_builtin_inf (location_t loc
, tree type
, int warn
)
6977 REAL_VALUE_TYPE real
;
6979 /* __builtin_inff is intended to be usable to define INFINITY on all
6980 targets. If an infinity is not available, INFINITY expands "to a
6981 positive constant of type float that overflows at translation
6982 time", footnote "In this case, using INFINITY will violate the
6983 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6984 Thus we pedwarn to ensure this constraint violation is
6986 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
6987 pedwarn (loc
, 0, "target format does not support infinity");
6990 return build_real (type
, real
);
6993 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6996 fold_builtin_nan (tree arg
, tree type
, int quiet
)
6998 REAL_VALUE_TYPE real
;
7001 if (!validate_arg (arg
, POINTER_TYPE
))
7003 str
= c_getstr (arg
);
7007 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7010 return build_real (type
, real
);
7013 /* Return true if the floating point expression T has an integer value.
7014 We also allow +Inf, -Inf and NaN to be considered integer values. */
7017 integer_valued_real_p (tree t
)
7019 switch (TREE_CODE (t
))
7026 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7031 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7038 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7039 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7042 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7043 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7046 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7050 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7051 if (TREE_CODE (type
) == INTEGER_TYPE
)
7053 if (TREE_CODE (type
) == REAL_TYPE
)
7054 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7059 switch (builtin_mathfn_code (t
))
7061 CASE_FLT_FN (BUILT_IN_CEIL
):
7062 CASE_FLT_FN (BUILT_IN_FLOOR
):
7063 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7064 CASE_FLT_FN (BUILT_IN_RINT
):
7065 CASE_FLT_FN (BUILT_IN_ROUND
):
7066 CASE_FLT_FN (BUILT_IN_TRUNC
):
7069 CASE_FLT_FN (BUILT_IN_FMIN
):
7070 CASE_FLT_FN (BUILT_IN_FMAX
):
7071 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7072 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7085 /* FNDECL is assumed to be a builtin where truncation can be propagated
7086 across (for instance floor((double)f) == (double)floorf (f).
7087 Do the transformation for a call with argument ARG. */
7090 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7092 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7094 if (!validate_arg (arg
, REAL_TYPE
))
7097 /* Integer rounding functions are idempotent. */
7098 if (fcode
== builtin_mathfn_code (arg
))
7101 /* If argument is already integer valued, and we don't need to worry
7102 about setting errno, there's no need to perform rounding. */
7103 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7108 tree arg0
= strip_float_extensions (arg
);
7109 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7110 tree newtype
= TREE_TYPE (arg0
);
7113 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7114 && (decl
= mathfn_built_in (newtype
, fcode
)))
7115 return fold_convert_loc (loc
, ftype
,
7116 build_call_expr_loc (loc
, decl
, 1,
7117 fold_convert_loc (loc
,
7124 /* FNDECL is assumed to be builtin which can narrow the FP type of
7125 the argument, for instance lround((double)f) -> lroundf (f).
7126 Do the transformation for a call with argument ARG. */
7129 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7131 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7133 if (!validate_arg (arg
, REAL_TYPE
))
7136 /* If argument is already integer valued, and we don't need to worry
7137 about setting errno, there's no need to perform rounding. */
7138 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7139 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7140 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7144 tree ftype
= TREE_TYPE (arg
);
7145 tree arg0
= strip_float_extensions (arg
);
7146 tree newtype
= TREE_TYPE (arg0
);
7149 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7150 && (decl
= mathfn_built_in (newtype
, fcode
)))
7151 return build_call_expr_loc (loc
, decl
, 1,
7152 fold_convert_loc (loc
, newtype
, arg0
));
7155 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7156 sizeof (int) == sizeof (long). */
7157 if (TYPE_PRECISION (integer_type_node
)
7158 == TYPE_PRECISION (long_integer_type_node
))
7160 tree newfn
= NULL_TREE
;
7163 CASE_FLT_FN (BUILT_IN_ICEIL
):
7164 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7167 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7168 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7171 CASE_FLT_FN (BUILT_IN_IROUND
):
7172 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7175 CASE_FLT_FN (BUILT_IN_IRINT
):
7176 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7185 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7186 return fold_convert_loc (loc
,
7187 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7191 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7192 sizeof (long long) == sizeof (long). */
7193 if (TYPE_PRECISION (long_long_integer_type_node
)
7194 == TYPE_PRECISION (long_integer_type_node
))
7196 tree newfn
= NULL_TREE
;
7199 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7200 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7203 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7204 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7207 CASE_FLT_FN (BUILT_IN_LLROUND
):
7208 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7211 CASE_FLT_FN (BUILT_IN_LLRINT
):
7212 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7221 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7222 return fold_convert_loc (loc
,
7223 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7230 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7231 return type. Return NULL_TREE if no simplification can be made. */
7234 fold_builtin_cabs (location_t loc
, tree arg
, tree type
, tree fndecl
)
7238 if (!validate_arg (arg
, COMPLEX_TYPE
)
7239 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7242 /* Calculate the result when the argument is a constant. */
7243 if (TREE_CODE (arg
) == COMPLEX_CST
7244 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7248 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7250 tree real
= TREE_OPERAND (arg
, 0);
7251 tree imag
= TREE_OPERAND (arg
, 1);
7253 /* If either part is zero, cabs is fabs of the other. */
7254 if (real_zerop (real
))
7255 return fold_build1_loc (loc
, ABS_EXPR
, type
, imag
);
7256 if (real_zerop (imag
))
7257 return fold_build1_loc (loc
, ABS_EXPR
, type
, real
);
7259 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7260 if (flag_unsafe_math_optimizations
7261 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7263 const REAL_VALUE_TYPE sqrt2_trunc
7264 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
7266 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7267 fold_build1_loc (loc
, ABS_EXPR
, type
, real
),
7268 build_real (type
, sqrt2_trunc
));
7272 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7273 if (TREE_CODE (arg
) == NEGATE_EXPR
7274 || TREE_CODE (arg
) == CONJ_EXPR
)
7275 return build_call_expr_loc (loc
, fndecl
, 1, TREE_OPERAND (arg
, 0));
7277 /* Don't do this when optimizing for size. */
7278 if (flag_unsafe_math_optimizations
7279 && optimize
&& optimize_function_for_speed_p (cfun
))
7281 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7283 if (sqrtfn
!= NULL_TREE
)
7285 tree rpart
, ipart
, result
;
7287 arg
= builtin_save_expr (arg
);
7289 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, type
, arg
);
7290 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg
);
7292 rpart
= builtin_save_expr (rpart
);
7293 ipart
= builtin_save_expr (ipart
);
7295 result
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
7296 fold_build2_loc (loc
, MULT_EXPR
, type
,
7298 fold_build2_loc (loc
, MULT_EXPR
, type
,
7301 return build_call_expr_loc (loc
, sqrtfn
, 1, result
);
7308 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7309 complex tree type of the result. If NEG is true, the imaginary
7310 zero is negative. */
7313 build_complex_cproj (tree type
, bool neg
)
7315 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7319 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7320 build_real (TREE_TYPE (type
), rzero
));
7323 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7324 return type. Return NULL_TREE if no simplification can be made. */
7327 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7329 if (!validate_arg (arg
, COMPLEX_TYPE
)
7330 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7333 /* If there are no infinities, return arg. */
7334 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type
))))
7335 return non_lvalue_loc (loc
, arg
);
7337 /* Calculate the result when the argument is a constant. */
7338 if (TREE_CODE (arg
) == COMPLEX_CST
)
7340 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7341 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7343 if (real_isinf (real
) || real_isinf (imag
))
7344 return build_complex_cproj (type
, imag
->sign
);
7348 else if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7350 tree real
= TREE_OPERAND (arg
, 0);
7351 tree imag
= TREE_OPERAND (arg
, 1);
7356 /* If the real part is inf and the imag part is known to be
7357 nonnegative, return (inf + 0i). Remember side-effects are
7358 possible in the imag part. */
7359 if (TREE_CODE (real
) == REAL_CST
7360 && real_isinf (TREE_REAL_CST_PTR (real
))
7361 && tree_expr_nonnegative_p (imag
))
7362 return omit_one_operand_loc (loc
, type
,
7363 build_complex_cproj (type
, false),
7366 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7367 Remember side-effects are possible in the real part. */
7368 if (TREE_CODE (imag
) == REAL_CST
7369 && real_isinf (TREE_REAL_CST_PTR (imag
)))
7371 omit_one_operand_loc (loc
, type
,
7372 build_complex_cproj (type
, TREE_REAL_CST_PTR
7373 (imag
)->sign
), arg
);
7379 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7380 Return NULL_TREE if no simplification can be made. */
7383 fold_builtin_sqrt (location_t loc
, tree arg
, tree type
)
7386 enum built_in_function fcode
;
7389 if (!validate_arg (arg
, REAL_TYPE
))
7392 /* Calculate the result when the argument is a constant. */
7393 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7396 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7397 fcode
= builtin_mathfn_code (arg
);
7398 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7400 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7401 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7402 CALL_EXPR_ARG (arg
, 0),
7403 build_real (type
, dconsthalf
));
7404 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7407 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7408 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7410 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7414 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7416 /* The inner root was either sqrt or cbrt. */
7417 /* This was a conditional expression but it triggered a bug
7419 REAL_VALUE_TYPE dconstroot
;
7420 if (BUILTIN_SQRT_P (fcode
))
7421 dconstroot
= dconsthalf
;
7423 dconstroot
= dconst_third ();
7425 /* Adjust for the outer root. */
7426 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7427 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7428 tree_root
= build_real (type
, dconstroot
);
7429 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7433 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7434 if (flag_unsafe_math_optimizations
7435 && (fcode
== BUILT_IN_POW
7436 || fcode
== BUILT_IN_POWF
7437 || fcode
== BUILT_IN_POWL
))
7439 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7440 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7441 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7443 if (!tree_expr_nonnegative_p (arg0
))
7444 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7445 narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
7446 build_real (type
, dconsthalf
));
7447 return build_call_expr_loc (loc
, powfn
, 2, arg0
, narg1
);
7453 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7454 Return NULL_TREE if no simplification can be made. */
7457 fold_builtin_cbrt (location_t loc
, tree arg
, tree type
)
7459 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7462 if (!validate_arg (arg
, REAL_TYPE
))
7465 /* Calculate the result when the argument is a constant. */
7466 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7469 if (flag_unsafe_math_optimizations
)
7471 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7472 if (BUILTIN_EXPONENT_P (fcode
))
7474 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7475 const REAL_VALUE_TYPE third_trunc
=
7476 real_value_truncate (TYPE_MODE (type
), dconst_third ());
7477 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
7478 CALL_EXPR_ARG (arg
, 0),
7479 build_real (type
, third_trunc
));
7480 return build_call_expr_loc (loc
, expfn
, 1, arg
);
7483 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7484 if (BUILTIN_SQRT_P (fcode
))
7486 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7490 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7492 REAL_VALUE_TYPE dconstroot
= dconst_third ();
7494 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7495 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7496 tree_root
= build_real (type
, dconstroot
);
7497 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7501 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7502 if (BUILTIN_CBRT_P (fcode
))
7504 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7505 if (tree_expr_nonnegative_p (arg0
))
7507 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7512 REAL_VALUE_TYPE dconstroot
;
7514 real_arithmetic (&dconstroot
, MULT_EXPR
,
7515 dconst_third_ptr (), dconst_third_ptr ());
7516 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7517 tree_root
= build_real (type
, dconstroot
);
7518 return build_call_expr_loc (loc
, powfn
, 2, arg0
, tree_root
);
7523 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7524 if (fcode
== BUILT_IN_POW
7525 || fcode
== BUILT_IN_POWF
7526 || fcode
== BUILT_IN_POWL
)
7528 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7529 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7530 if (tree_expr_nonnegative_p (arg00
))
7532 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7533 const REAL_VALUE_TYPE dconstroot
7534 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
7535 tree narg01
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
,
7536 build_real (type
, dconstroot
));
7537 return build_call_expr_loc (loc
, powfn
, 2, arg00
, narg01
);
7544 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7545 TYPE is the type of the return value. Return NULL_TREE if no
7546 simplification can be made. */
7549 fold_builtin_cos (location_t loc
,
7550 tree arg
, tree type
, tree fndecl
)
7554 if (!validate_arg (arg
, REAL_TYPE
))
7557 /* Calculate the result when the argument is a constant. */
7558 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7561 /* Optimize cos(-x) into cos (x). */
7562 if ((narg
= fold_strip_sign_ops (arg
)))
7563 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7568 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7569 Return NULL_TREE if no simplification can be made. */
7572 fold_builtin_cosh (location_t loc
, tree arg
, tree type
, tree fndecl
)
7574 if (validate_arg (arg
, REAL_TYPE
))
7578 /* Calculate the result when the argument is a constant. */
7579 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7582 /* Optimize cosh(-x) into cosh (x). */
7583 if ((narg
= fold_strip_sign_ops (arg
)))
7584 return build_call_expr_loc (loc
, fndecl
, 1, narg
);
7590 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7591 argument ARG. TYPE is the type of the return value. Return
7592 NULL_TREE if no simplification can be made. */
7595 fold_builtin_ccos (location_t loc
, tree arg
, tree type
, tree fndecl
,
7598 if (validate_arg (arg
, COMPLEX_TYPE
)
7599 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
7603 /* Calculate the result when the argument is a constant. */
7604 if ((tmp
= do_mpc_arg1 (arg
, type
, (hyper
? mpc_cosh
: mpc_cos
))))
7607 /* Optimize fn(-x) into fn(x). */
7608 if ((tmp
= fold_strip_sign_ops (arg
)))
7609 return build_call_expr_loc (loc
, fndecl
, 1, tmp
);
7615 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7616 Return NULL_TREE if no simplification can be made. */
7619 fold_builtin_tan (tree arg
, tree type
)
7621 enum built_in_function fcode
;
7624 if (!validate_arg (arg
, REAL_TYPE
))
7627 /* Calculate the result when the argument is a constant. */
7628 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7631 /* Optimize tan(atan(x)) = x. */
7632 fcode
= builtin_mathfn_code (arg
);
7633 if (flag_unsafe_math_optimizations
7634 && (fcode
== BUILT_IN_ATAN
7635 || fcode
== BUILT_IN_ATANF
7636 || fcode
== BUILT_IN_ATANL
))
7637 return CALL_EXPR_ARG (arg
, 0);
7642 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7643 NULL_TREE if no simplification can be made. */
7646 fold_builtin_sincos (location_t loc
,
7647 tree arg0
, tree arg1
, tree arg2
)
7652 if (!validate_arg (arg0
, REAL_TYPE
)
7653 || !validate_arg (arg1
, POINTER_TYPE
)
7654 || !validate_arg (arg2
, POINTER_TYPE
))
7657 type
= TREE_TYPE (arg0
);
7659 /* Calculate the result when the argument is a constant. */
7660 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7663 /* Canonicalize sincos to cexpi. */
7664 if (!targetm
.libc_has_function (function_c99_math_complex
))
7666 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7670 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7671 call
= builtin_save_expr (call
);
7673 return build2 (COMPOUND_EXPR
, void_type_node
,
7674 build2 (MODIFY_EXPR
, void_type_node
,
7675 build_fold_indirect_ref_loc (loc
, arg1
),
7676 build1 (IMAGPART_EXPR
, type
, call
)),
7677 build2 (MODIFY_EXPR
, void_type_node
,
7678 build_fold_indirect_ref_loc (loc
, arg2
),
7679 build1 (REALPART_EXPR
, type
, call
)));
7682 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7683 NULL_TREE if no simplification can be made. */
7686 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7689 tree realp
, imagp
, ifn
;
7692 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7693 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7696 /* Calculate the result when the argument is a constant. */
7697 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7700 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7702 /* In case we can figure out the real part of arg0 and it is constant zero
7704 if (!targetm
.libc_has_function (function_c99_math_complex
))
7706 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7710 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7711 && real_zerop (realp
))
7713 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7714 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7717 /* In case we can easily decompose real and imaginary parts split cexp
7718 to exp (r) * cexpi (i). */
7719 if (flag_unsafe_math_optimizations
7722 tree rfn
, rcall
, icall
;
7724 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7728 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7732 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7733 icall
= builtin_save_expr (icall
);
7734 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7735 rcall
= builtin_save_expr (rcall
);
7736 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7737 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7739 fold_build1_loc (loc
, REALPART_EXPR
,
7741 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7743 fold_build1_loc (loc
, IMAGPART_EXPR
,
7750 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7751 Return NULL_TREE if no simplification can be made. */
7754 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7756 if (!validate_arg (arg
, REAL_TYPE
))
7759 /* Optimize trunc of constant value. */
7760 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7762 REAL_VALUE_TYPE r
, x
;
7763 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7765 x
= TREE_REAL_CST (arg
);
7766 real_trunc (&r
, TYPE_MODE (type
), &x
);
7767 return build_real (type
, r
);
7770 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7773 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7774 Return NULL_TREE if no simplification can be made. */
7777 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7779 if (!validate_arg (arg
, REAL_TYPE
))
7782 /* Optimize floor of constant value. */
7783 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7787 x
= TREE_REAL_CST (arg
);
7788 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7790 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7793 real_floor (&r
, TYPE_MODE (type
), &x
);
7794 return build_real (type
, r
);
7798 /* Fold floor (x) where x is nonnegative to trunc (x). */
7799 if (tree_expr_nonnegative_p (arg
))
7801 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7803 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7806 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7809 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7810 Return NULL_TREE if no simplification can be made. */
7813 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7815 if (!validate_arg (arg
, REAL_TYPE
))
7818 /* Optimize ceil of constant value. */
7819 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7823 x
= TREE_REAL_CST (arg
);
7824 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7826 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7829 real_ceil (&r
, TYPE_MODE (type
), &x
);
7830 return build_real (type
, r
);
7834 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7837 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7838 Return NULL_TREE if no simplification can be made. */
7841 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7843 if (!validate_arg (arg
, REAL_TYPE
))
7846 /* Optimize round of constant value. */
7847 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7851 x
= TREE_REAL_CST (arg
);
7852 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7854 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7857 real_round (&r
, TYPE_MODE (type
), &x
);
7858 return build_real (type
, r
);
7862 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7865 /* Fold function call to builtin lround, lroundf or lroundl (or the
7866 corresponding long long versions) and other rounding functions. ARG
7867 is the argument to the call. Return NULL_TREE if no simplification
7871 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7873 if (!validate_arg (arg
, REAL_TYPE
))
7876 /* Optimize lround of constant value. */
7877 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7879 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7881 if (real_isfinite (&x
))
7883 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7884 tree ftype
= TREE_TYPE (arg
);
7888 switch (DECL_FUNCTION_CODE (fndecl
))
7890 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7891 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7892 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7893 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7896 CASE_FLT_FN (BUILT_IN_ICEIL
):
7897 CASE_FLT_FN (BUILT_IN_LCEIL
):
7898 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7899 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7902 CASE_FLT_FN (BUILT_IN_IROUND
):
7903 CASE_FLT_FN (BUILT_IN_LROUND
):
7904 CASE_FLT_FN (BUILT_IN_LLROUND
):
7905 real_round (&r
, TYPE_MODE (ftype
), &x
);
7912 real_to_integer2 ((HOST_WIDE_INT
*)&val
.low
, &val
.high
, &r
);
7913 if (double_int_fits_to_tree_p (itype
, val
))
7914 return double_int_to_tree (itype
, val
);
7918 switch (DECL_FUNCTION_CODE (fndecl
))
7920 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7921 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7922 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7923 if (tree_expr_nonnegative_p (arg
))
7924 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7925 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7930 return fold_fixed_mathfn (loc
, fndecl
, arg
);
7933 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7934 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7935 the argument to the call. Return NULL_TREE if no simplification can
7939 fold_builtin_bitop (tree fndecl
, tree arg
)
7941 if (!validate_arg (arg
, INTEGER_TYPE
))
7944 /* Optimize for constant argument. */
7945 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7947 HOST_WIDE_INT hi
, width
, result
;
7948 unsigned HOST_WIDE_INT lo
;
7951 type
= TREE_TYPE (arg
);
7952 width
= TYPE_PRECISION (type
);
7953 lo
= TREE_INT_CST_LOW (arg
);
7955 /* Clear all the bits that are beyond the type's precision. */
7956 if (width
> HOST_BITS_PER_WIDE_INT
)
7958 hi
= TREE_INT_CST_HIGH (arg
);
7959 if (width
< HOST_BITS_PER_DOUBLE_INT
)
7960 hi
&= ~(HOST_WIDE_INT_M1U
<< (width
- HOST_BITS_PER_WIDE_INT
));
7965 if (width
< HOST_BITS_PER_WIDE_INT
)
7966 lo
&= ~(HOST_WIDE_INT_M1U
<< width
);
7969 switch (DECL_FUNCTION_CODE (fndecl
))
7971 CASE_INT_FN (BUILT_IN_FFS
):
7973 result
= ffs_hwi (lo
);
7975 result
= HOST_BITS_PER_WIDE_INT
+ ffs_hwi (hi
);
7980 CASE_INT_FN (BUILT_IN_CLZ
):
7982 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7984 result
= width
- floor_log2 (lo
) - 1;
7985 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7989 CASE_INT_FN (BUILT_IN_CTZ
):
7991 result
= ctz_hwi (lo
);
7993 result
= HOST_BITS_PER_WIDE_INT
+ ctz_hwi (hi
);
7994 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7998 CASE_INT_FN (BUILT_IN_CLRSB
):
7999 if (width
> 2 * HOST_BITS_PER_WIDE_INT
)
8001 if (width
> HOST_BITS_PER_WIDE_INT
8002 && (hi
& ((unsigned HOST_WIDE_INT
) 1
8003 << (width
- HOST_BITS_PER_WIDE_INT
- 1))) != 0)
8005 hi
= ~hi
& ~(HOST_WIDE_INT_M1U
8006 << (width
- HOST_BITS_PER_WIDE_INT
- 1));
8009 else if (width
<= HOST_BITS_PER_WIDE_INT
8010 && (lo
& ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))) != 0)
8011 lo
= ~lo
& ~(HOST_WIDE_INT_M1U
<< (width
- 1));
8013 result
= width
- floor_log2 (hi
) - 2 - HOST_BITS_PER_WIDE_INT
;
8015 result
= width
- floor_log2 (lo
) - 2;
8020 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8023 result
++, lo
&= lo
- 1;
8025 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8028 CASE_INT_FN (BUILT_IN_PARITY
):
8031 result
++, lo
&= lo
- 1;
8033 result
++, hi
&= (unsigned HOST_WIDE_INT
) hi
- 1;
8041 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8047 /* Fold function call to builtin_bswap and the short, long and long long
8048 variants. Return NULL_TREE if no simplification can be made. */
8050 fold_builtin_bswap (tree fndecl
, tree arg
)
8052 if (! validate_arg (arg
, INTEGER_TYPE
))
8055 /* Optimize constant value. */
8056 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8058 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8059 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8060 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8062 width
= TYPE_PRECISION (type
);
8063 lo
= TREE_INT_CST_LOW (arg
);
8064 hi
= TREE_INT_CST_HIGH (arg
);
8066 switch (DECL_FUNCTION_CODE (fndecl
))
8068 case BUILT_IN_BSWAP16
:
8069 case BUILT_IN_BSWAP32
:
8070 case BUILT_IN_BSWAP64
:
8074 for (s
= 0; s
< width
; s
+= 8)
8076 int d
= width
- s
- 8;
8077 unsigned HOST_WIDE_INT byte
;
8079 if (s
< HOST_BITS_PER_WIDE_INT
)
8080 byte
= (lo
>> s
) & 0xff;
8082 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8084 if (d
< HOST_BITS_PER_WIDE_INT
)
8087 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8097 if (width
< HOST_BITS_PER_WIDE_INT
)
8098 return build_int_cst (type
, r_lo
);
8100 return build_int_cst_wide (type
, r_lo
, r_hi
);
8106 /* A subroutine of fold_builtin to fold the various logarithmic
8107 functions. Return NULL_TREE if no simplification can me made.
8108 FUNC is the corresponding MPFR logarithm function. */
8111 fold_builtin_logarithm (location_t loc
, tree fndecl
, tree arg
,
8112 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8114 if (validate_arg (arg
, REAL_TYPE
))
8116 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8118 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8120 /* Calculate the result when the argument is a constant. */
8121 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8124 /* Special case, optimize logN(expN(x)) = x. */
8125 if (flag_unsafe_math_optimizations
8126 && ((func
== mpfr_log
8127 && (fcode
== BUILT_IN_EXP
8128 || fcode
== BUILT_IN_EXPF
8129 || fcode
== BUILT_IN_EXPL
))
8130 || (func
== mpfr_log2
8131 && (fcode
== BUILT_IN_EXP2
8132 || fcode
== BUILT_IN_EXP2F
8133 || fcode
== BUILT_IN_EXP2L
))
8134 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8135 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8137 /* Optimize logN(func()) for various exponential functions. We
8138 want to determine the value "x" and the power "exponent" in
8139 order to transform logN(x**exponent) into exponent*logN(x). */
8140 if (flag_unsafe_math_optimizations
)
8142 tree exponent
= 0, x
= 0;
8146 CASE_FLT_FN (BUILT_IN_EXP
):
8147 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8148 x
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8150 exponent
= CALL_EXPR_ARG (arg
, 0);
8152 CASE_FLT_FN (BUILT_IN_EXP2
):
8153 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8154 x
= build_real (type
, dconst2
);
8155 exponent
= CALL_EXPR_ARG (arg
, 0);
8157 CASE_FLT_FN (BUILT_IN_EXP10
):
8158 CASE_FLT_FN (BUILT_IN_POW10
):
8159 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8161 REAL_VALUE_TYPE dconst10
;
8162 real_from_integer (&dconst10
, VOIDmode
, 10, 0, 0);
8163 x
= build_real (type
, dconst10
);
8165 exponent
= CALL_EXPR_ARG (arg
, 0);
8167 CASE_FLT_FN (BUILT_IN_SQRT
):
8168 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8169 x
= CALL_EXPR_ARG (arg
, 0);
8170 exponent
= build_real (type
, dconsthalf
);
8172 CASE_FLT_FN (BUILT_IN_CBRT
):
8173 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8174 x
= CALL_EXPR_ARG (arg
, 0);
8175 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8178 CASE_FLT_FN (BUILT_IN_POW
):
8179 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8180 x
= CALL_EXPR_ARG (arg
, 0);
8181 exponent
= CALL_EXPR_ARG (arg
, 1);
8187 /* Now perform the optimization. */
8190 tree logfn
= build_call_expr_loc (loc
, fndecl
, 1, x
);
8191 return fold_build2_loc (loc
, MULT_EXPR
, type
, exponent
, logfn
);
8199 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8200 NULL_TREE if no simplification can be made. */
8203 fold_builtin_hypot (location_t loc
, tree fndecl
,
8204 tree arg0
, tree arg1
, tree type
)
8206 tree res
, narg0
, narg1
;
8208 if (!validate_arg (arg0
, REAL_TYPE
)
8209 || !validate_arg (arg1
, REAL_TYPE
))
8212 /* Calculate the result when the argument is a constant. */
8213 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8216 /* If either argument to hypot has a negate or abs, strip that off.
8217 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8218 narg0
= fold_strip_sign_ops (arg0
);
8219 narg1
= fold_strip_sign_ops (arg1
);
8222 return build_call_expr_loc (loc
, fndecl
, 2, narg0
? narg0
: arg0
,
8223 narg1
? narg1
: arg1
);
8226 /* If either argument is zero, hypot is fabs of the other. */
8227 if (real_zerop (arg0
))
8228 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8229 else if (real_zerop (arg1
))
8230 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8232 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8233 if (flag_unsafe_math_optimizations
8234 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8236 const REAL_VALUE_TYPE sqrt2_trunc
8237 = real_value_truncate (TYPE_MODE (type
), dconst_sqrt2 ());
8238 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8239 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8240 build_real (type
, sqrt2_trunc
));
8247 /* Fold a builtin function call to pow, powf, or powl. Return
8248 NULL_TREE if no simplification can be made. */
8250 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8254 if (!validate_arg (arg0
, REAL_TYPE
)
8255 || !validate_arg (arg1
, REAL_TYPE
))
8258 /* Calculate the result when the argument is a constant. */
8259 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8262 /* Optimize pow(1.0,y) = 1.0. */
8263 if (real_onep (arg0
))
8264 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8266 if (TREE_CODE (arg1
) == REAL_CST
8267 && !TREE_OVERFLOW (arg1
))
8269 REAL_VALUE_TYPE cint
;
8273 c
= TREE_REAL_CST (arg1
);
8275 /* Optimize pow(x,0.0) = 1.0. */
8276 if (REAL_VALUES_EQUAL (c
, dconst0
))
8277 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8280 /* Optimize pow(x,1.0) = x. */
8281 if (REAL_VALUES_EQUAL (c
, dconst1
))
8284 /* Optimize pow(x,-1.0) = 1.0/x. */
8285 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8286 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8287 build_real (type
, dconst1
), arg0
);
8289 /* Optimize pow(x,0.5) = sqrt(x). */
8290 if (flag_unsafe_math_optimizations
8291 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8293 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8295 if (sqrtfn
!= NULL_TREE
)
8296 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8299 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8300 if (flag_unsafe_math_optimizations
)
8302 const REAL_VALUE_TYPE dconstroot
8303 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8305 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8307 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8308 if (cbrtfn
!= NULL_TREE
)
8309 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8313 /* Check for an integer exponent. */
8314 n
= real_to_integer (&c
);
8315 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8316 if (real_identical (&c
, &cint
))
8318 /* Attempt to evaluate pow at compile-time, unless this should
8319 raise an exception. */
8320 if (TREE_CODE (arg0
) == REAL_CST
8321 && !TREE_OVERFLOW (arg0
)
8323 || (!flag_trapping_math
&& !flag_errno_math
)
8324 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
), dconst0
)))
8329 x
= TREE_REAL_CST (arg0
);
8330 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8331 if (flag_unsafe_math_optimizations
|| !inexact
)
8332 return build_real (type
, x
);
8335 /* Strip sign ops from even integer powers. */
8336 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8338 tree narg0
= fold_strip_sign_ops (arg0
);
8340 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, arg1
);
8345 if (flag_unsafe_math_optimizations
)
8347 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8349 /* Optimize pow(expN(x),y) = expN(x*y). */
8350 if (BUILTIN_EXPONENT_P (fcode
))
8352 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8353 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8354 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8355 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8358 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8359 if (BUILTIN_SQRT_P (fcode
))
8361 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8362 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8363 build_real (type
, dconsthalf
));
8364 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8367 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8368 if (BUILTIN_CBRT_P (fcode
))
8370 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8371 if (tree_expr_nonnegative_p (arg
))
8373 const REAL_VALUE_TYPE dconstroot
8374 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8375 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8376 build_real (type
, dconstroot
));
8377 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8381 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8382 if (fcode
== BUILT_IN_POW
8383 || fcode
== BUILT_IN_POWF
8384 || fcode
== BUILT_IN_POWL
)
8386 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8387 if (tree_expr_nonnegative_p (arg00
))
8389 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8390 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8391 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8399 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8400 Return NULL_TREE if no simplification can be made. */
8402 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8403 tree arg0
, tree arg1
, tree type
)
8405 if (!validate_arg (arg0
, REAL_TYPE
)
8406 || !validate_arg (arg1
, INTEGER_TYPE
))
8409 /* Optimize pow(1.0,y) = 1.0. */
8410 if (real_onep (arg0
))
8411 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8413 if (host_integerp (arg1
, 0))
8415 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8417 /* Evaluate powi at compile-time. */
8418 if (TREE_CODE (arg0
) == REAL_CST
8419 && !TREE_OVERFLOW (arg0
))
8422 x
= TREE_REAL_CST (arg0
);
8423 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8424 return build_real (type
, x
);
8427 /* Optimize pow(x,0) = 1.0. */
8429 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8432 /* Optimize pow(x,1) = x. */
8436 /* Optimize pow(x,-1) = 1.0/x. */
8438 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8439 build_real (type
, dconst1
), arg0
);
8445 /* A subroutine of fold_builtin to fold the various exponent
8446 functions. Return NULL_TREE if no simplification can be made.
8447 FUNC is the corresponding MPFR exponent function. */
8450 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8451 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8453 if (validate_arg (arg
, REAL_TYPE
))
8455 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8458 /* Calculate the result when the argument is a constant. */
8459 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8462 /* Optimize expN(logN(x)) = x. */
8463 if (flag_unsafe_math_optimizations
)
8465 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8467 if ((func
== mpfr_exp
8468 && (fcode
== BUILT_IN_LOG
8469 || fcode
== BUILT_IN_LOGF
8470 || fcode
== BUILT_IN_LOGL
))
8471 || (func
== mpfr_exp2
8472 && (fcode
== BUILT_IN_LOG2
8473 || fcode
== BUILT_IN_LOG2F
8474 || fcode
== BUILT_IN_LOG2L
))
8475 || (func
== mpfr_exp10
8476 && (fcode
== BUILT_IN_LOG10
8477 || fcode
== BUILT_IN_LOG10F
8478 || fcode
== BUILT_IN_LOG10L
)))
8479 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8486 /* Return true if VAR is a VAR_DECL or a component thereof. */
8489 var_decl_component_p (tree var
)
8492 while (handled_component_p (inner
))
8493 inner
= TREE_OPERAND (inner
, 0);
8494 return SSA_VAR_P (inner
);
8497 /* Fold function call to builtin memset. Return
8498 NULL_TREE if no simplification can be made. */
8501 fold_builtin_memset (location_t loc
, tree dest
, tree c
, tree len
,
8502 tree type
, bool ignore
)
8504 tree var
, ret
, etype
;
8505 unsigned HOST_WIDE_INT length
, cval
;
8507 if (! validate_arg (dest
, POINTER_TYPE
)
8508 || ! validate_arg (c
, INTEGER_TYPE
)
8509 || ! validate_arg (len
, INTEGER_TYPE
))
8512 if (! host_integerp (len
, 1))
8515 /* If the LEN parameter is zero, return DEST. */
8516 if (integer_zerop (len
))
8517 return omit_one_operand_loc (loc
, type
, dest
, c
);
8519 if (TREE_CODE (c
) != INTEGER_CST
|| TREE_SIDE_EFFECTS (dest
))
8524 if (TREE_CODE (var
) != ADDR_EXPR
)
8527 var
= TREE_OPERAND (var
, 0);
8528 if (TREE_THIS_VOLATILE (var
))
8531 etype
= TREE_TYPE (var
);
8532 if (TREE_CODE (etype
) == ARRAY_TYPE
)
8533 etype
= TREE_TYPE (etype
);
8535 if (!INTEGRAL_TYPE_P (etype
)
8536 && !POINTER_TYPE_P (etype
))
8539 if (! var_decl_component_p (var
))
8542 length
= tree_low_cst (len
, 1);
8543 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
8544 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
8547 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8550 if (integer_zerop (c
))
8554 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8557 cval
= TREE_INT_CST_LOW (c
);
8561 cval
|= (cval
<< 31) << 1;
8564 ret
= build_int_cst_type (etype
, cval
);
8565 var
= build_fold_indirect_ref_loc (loc
,
8566 fold_convert_loc (loc
,
8567 build_pointer_type (etype
),
8569 ret
= build2 (MODIFY_EXPR
, etype
, var
, ret
);
8573 return omit_one_operand_loc (loc
, type
, dest
, ret
);
8576 /* Fold function call to builtin memset. Return
8577 NULL_TREE if no simplification can be made. */
8580 fold_builtin_bzero (location_t loc
, tree dest
, tree size
, bool ignore
)
8582 if (! validate_arg (dest
, POINTER_TYPE
)
8583 || ! validate_arg (size
, INTEGER_TYPE
))
8589 /* New argument list transforming bzero(ptr x, int y) to
8590 memset(ptr x, int 0, size_t y). This is done this way
8591 so that if it isn't expanded inline, we fallback to
8592 calling bzero instead of memset. */
8594 return fold_builtin_memset (loc
, dest
, integer_zero_node
,
8595 fold_convert_loc (loc
, size_type_node
, size
),
8596 void_type_node
, ignore
);
8599 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8600 NULL_TREE if no simplification can be made.
8601 If ENDP is 0, return DEST (like memcpy).
8602 If ENDP is 1, return DEST+LEN (like mempcpy).
8603 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8604 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8608 fold_builtin_memory_op (location_t loc
, tree dest
, tree src
,
8609 tree len
, tree type
, bool ignore
, int endp
)
8611 tree destvar
, srcvar
, expr
;
8613 if (! validate_arg (dest
, POINTER_TYPE
)
8614 || ! validate_arg (src
, POINTER_TYPE
)
8615 || ! validate_arg (len
, INTEGER_TYPE
))
8618 /* If the LEN parameter is zero, return DEST. */
8619 if (integer_zerop (len
))
8620 return omit_one_operand_loc (loc
, type
, dest
, src
);
8622 /* If SRC and DEST are the same (and not volatile), return
8623 DEST{,+LEN,+LEN-1}. */
8624 if (operand_equal_p (src
, dest
, 0))
8628 tree srctype
, desttype
;
8629 unsigned int src_align
, dest_align
;
8634 src_align
= get_pointer_alignment (src
);
8635 dest_align
= get_pointer_alignment (dest
);
8637 /* Both DEST and SRC must be pointer types.
8638 ??? This is what old code did. Is the testing for pointer types
8641 If either SRC is readonly or length is 1, we can use memcpy. */
8642 if (!dest_align
|| !src_align
)
8644 if (readonly_data_expr (src
)
8645 || (host_integerp (len
, 1)
8646 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
8647 >= (unsigned HOST_WIDE_INT
) tree_low_cst (len
, 1))))
8649 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8652 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8655 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8656 if (TREE_CODE (src
) == ADDR_EXPR
8657 && TREE_CODE (dest
) == ADDR_EXPR
)
8659 tree src_base
, dest_base
, fn
;
8660 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
8661 HOST_WIDE_INT size
= -1;
8662 HOST_WIDE_INT maxsize
= -1;
8664 srcvar
= TREE_OPERAND (src
, 0);
8665 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
8667 destvar
= TREE_OPERAND (dest
, 0);
8668 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
8670 if (host_integerp (len
, 1))
8671 maxsize
= tree_low_cst (len
, 1);
8674 src_offset
/= BITS_PER_UNIT
;
8675 dest_offset
/= BITS_PER_UNIT
;
8676 if (SSA_VAR_P (src_base
)
8677 && SSA_VAR_P (dest_base
))
8679 if (operand_equal_p (src_base
, dest_base
, 0)
8680 && ranges_overlap_p (src_offset
, maxsize
,
8681 dest_offset
, maxsize
))
8684 else if (TREE_CODE (src_base
) == MEM_REF
8685 && TREE_CODE (dest_base
) == MEM_REF
)
8688 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
8689 TREE_OPERAND (dest_base
, 0), 0))
8691 off
= mem_ref_offset (src_base
) +
8692 double_int::from_shwi (src_offset
);
8693 if (!off
.fits_shwi ())
8695 src_offset
= off
.low
;
8696 off
= mem_ref_offset (dest_base
) +
8697 double_int::from_shwi (dest_offset
);
8698 if (!off
.fits_shwi ())
8700 dest_offset
= off
.low
;
8701 if (ranges_overlap_p (src_offset
, maxsize
,
8702 dest_offset
, maxsize
))
8708 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8711 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8714 /* If the destination and source do not alias optimize into
8716 if ((is_gimple_min_invariant (dest
)
8717 || TREE_CODE (dest
) == SSA_NAME
)
8718 && (is_gimple_min_invariant (src
)
8719 || TREE_CODE (src
) == SSA_NAME
))
8722 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
8723 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
8724 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
8727 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8730 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
8737 if (!host_integerp (len
, 0))
8740 This logic lose for arguments like (type *)malloc (sizeof (type)),
8741 since we strip the casts of up to VOID return value from malloc.
8742 Perhaps we ought to inherit type from non-VOID argument here? */
8745 if (!POINTER_TYPE_P (TREE_TYPE (src
))
8746 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
8748 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8749 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
8751 tree tem
= TREE_OPERAND (src
, 0);
8753 if (tem
!= TREE_OPERAND (src
, 0))
8754 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
8756 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
8758 tree tem
= TREE_OPERAND (dest
, 0);
8760 if (tem
!= TREE_OPERAND (dest
, 0))
8761 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
8763 srctype
= TREE_TYPE (TREE_TYPE (src
));
8764 if (TREE_CODE (srctype
) == ARRAY_TYPE
8765 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8767 srctype
= TREE_TYPE (srctype
);
8769 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
8771 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8772 if (TREE_CODE (desttype
) == ARRAY_TYPE
8773 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8775 desttype
= TREE_TYPE (desttype
);
8777 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
8779 if (TREE_ADDRESSABLE (srctype
)
8780 || TREE_ADDRESSABLE (desttype
))
8783 src_align
= get_pointer_alignment (src
);
8784 dest_align
= get_pointer_alignment (dest
);
8785 if (dest_align
< TYPE_ALIGN (desttype
)
8786 || src_align
< TYPE_ALIGN (srctype
))
8790 dest
= builtin_save_expr (dest
);
8792 /* Build accesses at offset zero with a ref-all character type. */
8793 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
8794 ptr_mode
, true), 0);
8797 STRIP_NOPS (destvar
);
8798 if (TREE_CODE (destvar
) == ADDR_EXPR
8799 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
8800 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8801 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
8803 destvar
= NULL_TREE
;
8806 STRIP_NOPS (srcvar
);
8807 if (TREE_CODE (srcvar
) == ADDR_EXPR
8808 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
8809 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
8812 || src_align
>= TYPE_ALIGN (desttype
))
8813 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
8815 else if (!STRICT_ALIGNMENT
)
8817 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8819 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
8827 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
8830 if (srcvar
== NULL_TREE
)
8833 if (src_align
>= TYPE_ALIGN (desttype
))
8834 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
8837 if (STRICT_ALIGNMENT
)
8839 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
8841 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
8844 else if (destvar
== NULL_TREE
)
8847 if (dest_align
>= TYPE_ALIGN (srctype
))
8848 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
8851 if (STRICT_ALIGNMENT
)
8853 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
8855 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
8859 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, srcvar
);
8865 if (endp
== 0 || endp
== 3)
8866 return omit_one_operand_loc (loc
, type
, dest
, expr
);
8872 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
8875 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
8876 dest
= fold_convert_loc (loc
, type
, dest
);
8878 dest
= omit_one_operand_loc (loc
, type
, dest
, expr
);
8882 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8883 If LEN is not NULL, it represents the length of the string to be
8884 copied. Return NULL_TREE if no simplification can be made. */
8887 fold_builtin_strcpy (location_t loc
, tree fndecl
, tree dest
, tree src
, tree len
)
8891 if (!validate_arg (dest
, POINTER_TYPE
)
8892 || !validate_arg (src
, POINTER_TYPE
))
8895 /* If SRC and DEST are the same (and not volatile), return DEST. */
8896 if (operand_equal_p (src
, dest
, 0))
8897 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8899 if (optimize_function_for_size_p (cfun
))
8902 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8908 len
= c_strlen (src
, 1);
8909 if (! len
|| TREE_SIDE_EFFECTS (len
))
8913 len
= fold_convert_loc (loc
, size_type_node
, len
);
8914 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
8915 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
8916 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
8919 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8920 Return NULL_TREE if no simplification can be made. */
8923 fold_builtin_stpcpy (location_t loc
, tree fndecl
, tree dest
, tree src
)
8925 tree fn
, len
, lenp1
, call
, type
;
8927 if (!validate_arg (dest
, POINTER_TYPE
)
8928 || !validate_arg (src
, POINTER_TYPE
))
8931 len
= c_strlen (src
, 1);
8933 || TREE_CODE (len
) != INTEGER_CST
)
8936 if (optimize_function_for_size_p (cfun
)
8937 /* If length is zero it's small enough. */
8938 && !integer_zerop (len
))
8941 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
8945 lenp1
= size_binop_loc (loc
, PLUS_EXPR
,
8946 fold_convert_loc (loc
, size_type_node
, len
),
8947 build_int_cst (size_type_node
, 1));
8948 /* We use dest twice in building our expression. Save it from
8949 multiple expansions. */
8950 dest
= builtin_save_expr (dest
);
8951 call
= build_call_expr_loc (loc
, fn
, 3, dest
, src
, lenp1
);
8953 type
= TREE_TYPE (TREE_TYPE (fndecl
));
8954 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
8955 dest
= fold_convert_loc (loc
, type
, dest
);
8956 dest
= omit_one_operand_loc (loc
, type
, dest
, call
);
8960 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8961 If SLEN is not NULL, it represents the length of the source string.
8962 Return NULL_TREE if no simplification can be made. */
8965 fold_builtin_strncpy (location_t loc
, tree fndecl
, tree dest
,
8966 tree src
, tree len
, tree slen
)
8970 if (!validate_arg (dest
, POINTER_TYPE
)
8971 || !validate_arg (src
, POINTER_TYPE
)
8972 || !validate_arg (len
, INTEGER_TYPE
))
8975 /* If the LEN parameter is zero, return DEST. */
8976 if (integer_zerop (len
))
8977 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8979 /* We can't compare slen with len as constants below if len is not a
8981 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8985 slen
= c_strlen (src
, 1);
8987 /* Now, we must be passed a constant src ptr parameter. */
8988 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8991 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
8993 /* We do not support simplification of this case, though we do
8994 support it when expanding trees into RTL. */
8995 /* FIXME: generate a call to __builtin_memset. */
8996 if (tree_int_cst_lt (slen
, len
))
8999 /* OK transform into builtin memcpy. */
9000 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
9004 len
= fold_convert_loc (loc
, size_type_node
, len
);
9005 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
9006 build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
));
9009 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9010 arguments to the call, and TYPE is its return type.
9011 Return NULL_TREE if no simplification can be made. */
9014 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
9016 if (!validate_arg (arg1
, POINTER_TYPE
)
9017 || !validate_arg (arg2
, INTEGER_TYPE
)
9018 || !validate_arg (len
, INTEGER_TYPE
))
9024 if (TREE_CODE (arg2
) != INTEGER_CST
9025 || !host_integerp (len
, 1))
9028 p1
= c_getstr (arg1
);
9029 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
9035 if (target_char_cast (arg2
, &c
))
9038 r
= (const char *) memchr (p1
, c
, tree_low_cst (len
, 1));
9041 return build_int_cst (TREE_TYPE (arg1
), 0);
9043 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
9044 return fold_convert_loc (loc
, type
, tem
);
9050 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9051 Return NULL_TREE if no simplification can be made. */
9054 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9056 const char *p1
, *p2
;
9058 if (!validate_arg (arg1
, POINTER_TYPE
)
9059 || !validate_arg (arg2
, POINTER_TYPE
)
9060 || !validate_arg (len
, INTEGER_TYPE
))
9063 /* If the LEN parameter is zero, return zero. */
9064 if (integer_zerop (len
))
9065 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9068 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9069 if (operand_equal_p (arg1
, arg2
, 0))
9070 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9072 p1
= c_getstr (arg1
);
9073 p2
= c_getstr (arg2
);
9075 /* If all arguments are constant, and the value of len is not greater
9076 than the lengths of arg1 and arg2, evaluate at compile-time. */
9077 if (host_integerp (len
, 1) && p1
&& p2
9078 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
9079 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
9081 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
9084 return integer_one_node
;
9086 return integer_minus_one_node
;
9088 return integer_zero_node
;
9091 /* If len parameter is one, return an expression corresponding to
9092 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9093 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9095 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9096 tree cst_uchar_ptr_node
9097 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9100 = fold_convert_loc (loc
, integer_type_node
,
9101 build1 (INDIRECT_REF
, cst_uchar_node
,
9102 fold_convert_loc (loc
,
9106 = fold_convert_loc (loc
, integer_type_node
,
9107 build1 (INDIRECT_REF
, cst_uchar_node
,
9108 fold_convert_loc (loc
,
9111 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9117 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9118 Return NULL_TREE if no simplification can be made. */
9121 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
9123 const char *p1
, *p2
;
9125 if (!validate_arg (arg1
, POINTER_TYPE
)
9126 || !validate_arg (arg2
, POINTER_TYPE
))
9129 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9130 if (operand_equal_p (arg1
, arg2
, 0))
9131 return integer_zero_node
;
9133 p1
= c_getstr (arg1
);
9134 p2
= c_getstr (arg2
);
9138 const int i
= strcmp (p1
, p2
);
9140 return integer_minus_one_node
;
9142 return integer_one_node
;
9144 return integer_zero_node
;
9147 /* If the second arg is "", return *(const unsigned char*)arg1. */
9148 if (p2
&& *p2
== '\0')
9150 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9151 tree cst_uchar_ptr_node
9152 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9154 return fold_convert_loc (loc
, integer_type_node
,
9155 build1 (INDIRECT_REF
, cst_uchar_node
,
9156 fold_convert_loc (loc
,
9161 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9162 if (p1
&& *p1
== '\0')
9164 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9165 tree cst_uchar_ptr_node
9166 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9169 = fold_convert_loc (loc
, integer_type_node
,
9170 build1 (INDIRECT_REF
, cst_uchar_node
,
9171 fold_convert_loc (loc
,
9174 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9180 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9181 Return NULL_TREE if no simplification can be made. */
9184 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
9186 const char *p1
, *p2
;
9188 if (!validate_arg (arg1
, POINTER_TYPE
)
9189 || !validate_arg (arg2
, POINTER_TYPE
)
9190 || !validate_arg (len
, INTEGER_TYPE
))
9193 /* If the LEN parameter is zero, return zero. */
9194 if (integer_zerop (len
))
9195 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
9198 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9199 if (operand_equal_p (arg1
, arg2
, 0))
9200 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
9202 p1
= c_getstr (arg1
);
9203 p2
= c_getstr (arg2
);
9205 if (host_integerp (len
, 1) && p1
&& p2
)
9207 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9209 return integer_one_node
;
9211 return integer_minus_one_node
;
9213 return integer_zero_node
;
9216 /* If the second arg is "", and the length is greater than zero,
9217 return *(const unsigned char*)arg1. */
9218 if (p2
&& *p2
== '\0'
9219 && TREE_CODE (len
) == INTEGER_CST
9220 && tree_int_cst_sgn (len
) == 1)
9222 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9223 tree cst_uchar_ptr_node
9224 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9226 return fold_convert_loc (loc
, integer_type_node
,
9227 build1 (INDIRECT_REF
, cst_uchar_node
,
9228 fold_convert_loc (loc
,
9233 /* If the first arg is "", and the length is greater than zero,
9234 return -*(const unsigned char*)arg2. */
9235 if (p1
&& *p1
== '\0'
9236 && TREE_CODE (len
) == INTEGER_CST
9237 && tree_int_cst_sgn (len
) == 1)
9239 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9240 tree cst_uchar_ptr_node
9241 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9243 tree temp
= fold_convert_loc (loc
, integer_type_node
,
9244 build1 (INDIRECT_REF
, cst_uchar_node
,
9245 fold_convert_loc (loc
,
9248 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
9251 /* If len parameter is one, return an expression corresponding to
9252 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9253 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9255 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9256 tree cst_uchar_ptr_node
9257 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9259 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
9260 build1 (INDIRECT_REF
, cst_uchar_node
,
9261 fold_convert_loc (loc
,
9264 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
9265 build1 (INDIRECT_REF
, cst_uchar_node
,
9266 fold_convert_loc (loc
,
9269 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9275 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9276 ARG. Return NULL_TREE if no simplification can be made. */
9279 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
9281 if (!validate_arg (arg
, REAL_TYPE
))
9284 /* If ARG is a compile-time constant, determine the result. */
9285 if (TREE_CODE (arg
) == REAL_CST
9286 && !TREE_OVERFLOW (arg
))
9290 c
= TREE_REAL_CST (arg
);
9291 return (REAL_VALUE_NEGATIVE (c
)
9292 ? build_one_cst (type
)
9293 : build_zero_cst (type
));
9296 /* If ARG is non-negative, the result is always zero. */
9297 if (tree_expr_nonnegative_p (arg
))
9298 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9300 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9301 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9302 return fold_convert (type
,
9303 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
9304 build_real (TREE_TYPE (arg
), dconst0
)));
9309 /* Fold function call to builtin copysign, copysignf or copysignl with
9310 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9314 fold_builtin_copysign (location_t loc
, tree fndecl
,
9315 tree arg1
, tree arg2
, tree type
)
9319 if (!validate_arg (arg1
, REAL_TYPE
)
9320 || !validate_arg (arg2
, REAL_TYPE
))
9323 /* copysign(X,X) is X. */
9324 if (operand_equal_p (arg1
, arg2
, 0))
9325 return fold_convert_loc (loc
, type
, arg1
);
9327 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9328 if (TREE_CODE (arg1
) == REAL_CST
9329 && TREE_CODE (arg2
) == REAL_CST
9330 && !TREE_OVERFLOW (arg1
)
9331 && !TREE_OVERFLOW (arg2
))
9333 REAL_VALUE_TYPE c1
, c2
;
9335 c1
= TREE_REAL_CST (arg1
);
9336 c2
= TREE_REAL_CST (arg2
);
9337 /* c1.sign := c2.sign. */
9338 real_copysign (&c1
, &c2
);
9339 return build_real (type
, c1
);
9342 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9343 Remember to evaluate Y for side-effects. */
9344 if (tree_expr_nonnegative_p (arg2
))
9345 return omit_one_operand_loc (loc
, type
,
9346 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
9349 /* Strip sign changing operations for the first argument. */
9350 tem
= fold_strip_sign_ops (arg1
);
9352 return build_call_expr_loc (loc
, fndecl
, 2, tem
, arg2
);
9357 /* Fold a call to builtin isascii with argument ARG. */
9360 fold_builtin_isascii (location_t loc
, tree arg
)
9362 if (!validate_arg (arg
, INTEGER_TYPE
))
9366 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9367 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9368 build_int_cst (integer_type_node
,
9369 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9370 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
9371 arg
, integer_zero_node
);
9375 /* Fold a call to builtin toascii with argument ARG. */
9378 fold_builtin_toascii (location_t loc
, tree arg
)
9380 if (!validate_arg (arg
, INTEGER_TYPE
))
9383 /* Transform toascii(c) -> (c & 0x7f). */
9384 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
9385 build_int_cst (integer_type_node
, 0x7f));
9388 /* Fold a call to builtin isdigit with argument ARG. */
9391 fold_builtin_isdigit (location_t loc
, tree arg
)
9393 if (!validate_arg (arg
, INTEGER_TYPE
))
9397 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9398 /* According to the C standard, isdigit is unaffected by locale.
9399 However, it definitely is affected by the target character set. */
9400 unsigned HOST_WIDE_INT target_digit0
9401 = lang_hooks
.to_target_charset ('0');
9403 if (target_digit0
== 0)
9406 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
9407 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9408 build_int_cst (unsigned_type_node
, target_digit0
));
9409 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
9410 build_int_cst (unsigned_type_node
, 9));
9414 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9417 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
9419 if (!validate_arg (arg
, REAL_TYPE
))
9422 arg
= fold_convert_loc (loc
, type
, arg
);
9423 if (TREE_CODE (arg
) == REAL_CST
)
9424 return fold_abs_const (arg
, type
);
9425 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9428 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9431 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
9433 if (!validate_arg (arg
, INTEGER_TYPE
))
9436 arg
= fold_convert_loc (loc
, type
, arg
);
9437 if (TREE_CODE (arg
) == INTEGER_CST
)
9438 return fold_abs_const (arg
, type
);
9439 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9442 /* Fold a fma operation with arguments ARG[012]. */
9445 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
9446 tree type
, tree arg0
, tree arg1
, tree arg2
)
9448 if (TREE_CODE (arg0
) == REAL_CST
9449 && TREE_CODE (arg1
) == REAL_CST
9450 && TREE_CODE (arg2
) == REAL_CST
)
9451 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9456 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9459 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
9461 if (validate_arg (arg0
, REAL_TYPE
)
9462 && validate_arg (arg1
, REAL_TYPE
)
9463 && validate_arg (arg2
, REAL_TYPE
))
9465 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
9469 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9470 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
9471 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
9476 /* Fold a call to builtin fmin or fmax. */
9479 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
9480 tree type
, bool max
)
9482 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9484 /* Calculate the result when the argument is a constant. */
9485 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9490 /* If either argument is NaN, return the other one. Avoid the
9491 transformation if we get (and honor) a signalling NaN. Using
9492 omit_one_operand() ensures we create a non-lvalue. */
9493 if (TREE_CODE (arg0
) == REAL_CST
9494 && real_isnan (&TREE_REAL_CST (arg0
))
9495 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9496 || ! TREE_REAL_CST (arg0
).signalling
))
9497 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
9498 if (TREE_CODE (arg1
) == REAL_CST
9499 && real_isnan (&TREE_REAL_CST (arg1
))
9500 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9501 || ! TREE_REAL_CST (arg1
).signalling
))
9502 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9504 /* Transform fmin/fmax(x,x) -> x. */
9505 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9506 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9508 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9509 functions to return the numeric arg if the other one is NaN.
9510 These tree codes don't honor that, so only transform if
9511 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9512 handled, so we don't have to worry about it either. */
9513 if (flag_finite_math_only
)
9514 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
9515 fold_convert_loc (loc
, type
, arg0
),
9516 fold_convert_loc (loc
, type
, arg1
));
9521 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9524 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
9526 if (validate_arg (arg
, COMPLEX_TYPE
)
9527 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
9529 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9533 tree new_arg
= builtin_save_expr (arg
);
9534 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
9535 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
9536 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
9543 /* Fold a call to builtin logb/ilogb. */
9546 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
9548 if (! validate_arg (arg
, REAL_TYPE
))
9553 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9555 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9561 /* If arg is Inf or NaN and we're logb, return it. */
9562 if (TREE_CODE (rettype
) == REAL_TYPE
)
9564 /* For logb(-Inf) we have to return +Inf. */
9565 if (real_isinf (value
) && real_isneg (value
))
9567 REAL_VALUE_TYPE tem
;
9569 return build_real (rettype
, tem
);
9571 return fold_convert_loc (loc
, rettype
, arg
);
9573 /* Fall through... */
9575 /* Zero may set errno and/or raise an exception for logb, also
9576 for ilogb we don't know FP_ILOGB0. */
9579 /* For normal numbers, proceed iff radix == 2. In GCC,
9580 normalized significands are in the range [0.5, 1.0). We
9581 want the exponent as if they were [1.0, 2.0) so get the
9582 exponent and subtract 1. */
9583 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9584 return fold_convert_loc (loc
, rettype
,
9585 build_int_cst (integer_type_node
,
9586 REAL_EXP (value
)-1));
9594 /* Fold a call to builtin significand, if radix == 2. */
9597 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
9599 if (! validate_arg (arg
, REAL_TYPE
))
9604 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9606 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9613 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9614 return fold_convert_loc (loc
, rettype
, arg
);
9616 /* For normal numbers, proceed iff radix == 2. */
9617 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9619 REAL_VALUE_TYPE result
= *value
;
9620 /* In GCC, normalized significands are in the range [0.5,
9621 1.0). We want them to be [1.0, 2.0) so set the
9623 SET_REAL_EXP (&result
, 1);
9624 return build_real (rettype
, result
);
9633 /* Fold a call to builtin frexp, we can assume the base is 2. */
9636 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9638 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9643 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9646 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9648 /* Proceed if a valid pointer type was passed in. */
9649 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9651 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9657 /* For +-0, return (*exp = 0, +-0). */
9658 exp
= integer_zero_node
;
9663 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9664 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
9667 /* Since the frexp function always expects base 2, and in
9668 GCC normalized significands are already in the range
9669 [0.5, 1.0), we have exactly what frexp wants. */
9670 REAL_VALUE_TYPE frac_rvt
= *value
;
9671 SET_REAL_EXP (&frac_rvt
, 0);
9672 frac
= build_real (rettype
, frac_rvt
);
9673 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
9680 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9681 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
9682 TREE_SIDE_EFFECTS (arg1
) = 1;
9683 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
9689 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9690 then we can assume the base is two. If it's false, then we have to
9691 check the mode of the TYPE parameter in certain cases. */
9694 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
9695 tree type
, bool ldexp
)
9697 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9702 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9703 if (real_zerop (arg0
) || integer_zerop (arg1
)
9704 || (TREE_CODE (arg0
) == REAL_CST
9705 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9706 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
9708 /* If both arguments are constant, then try to evaluate it. */
9709 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9710 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9711 && host_integerp (arg1
, 0))
9713 /* Bound the maximum adjustment to twice the range of the
9714 mode's valid exponents. Use abs to ensure the range is
9715 positive as a sanity check. */
9716 const long max_exp_adj
= 2 *
9717 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9718 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9720 /* Get the user-requested adjustment. */
9721 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9723 /* The requested adjustment must be inside this range. This
9724 is a preliminary cap to avoid things like overflow, we
9725 may still fail to compute the result for other reasons. */
9726 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9728 REAL_VALUE_TYPE initial_result
;
9730 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9732 /* Ensure we didn't overflow. */
9733 if (! real_isinf (&initial_result
))
9735 const REAL_VALUE_TYPE trunc_result
9736 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9738 /* Only proceed if the target mode can hold the
9740 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9741 return build_real (type
, trunc_result
);
9750 /* Fold a call to builtin modf. */
9753 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
9755 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9760 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9763 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9765 /* Proceed if a valid pointer type was passed in. */
9766 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9768 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9769 REAL_VALUE_TYPE trunc
, frac
;
9775 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9776 trunc
= frac
= *value
;
9779 /* For +-Inf, return (*arg1 = arg0, +-0). */
9781 frac
.sign
= value
->sign
;
9785 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9786 real_trunc (&trunc
, VOIDmode
, value
);
9787 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9788 /* If the original number was negative and already
9789 integral, then the fractional part is -0.0. */
9790 if (value
->sign
&& frac
.cl
== rvc_zero
)
9791 frac
.sign
= value
->sign
;
9795 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9796 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9797 build_real (rettype
, trunc
));
9798 TREE_SIDE_EFFECTS (arg1
) = 1;
9799 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9800 build_real (rettype
, frac
));
9806 /* Given a location LOC, an interclass builtin function decl FNDECL
9807 and its single argument ARG, return an folded expression computing
9808 the same, or NULL_TREE if we either couldn't or didn't want to fold
9809 (the latter happen if there's an RTL instruction available). */
9812 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9814 enum machine_mode mode
;
9816 if (!validate_arg (arg
, REAL_TYPE
))
9819 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9822 mode
= TYPE_MODE (TREE_TYPE (arg
));
9824 /* If there is no optab, try generic code. */
9825 switch (DECL_FUNCTION_CODE (fndecl
))
9829 CASE_FLT_FN (BUILT_IN_ISINF
):
9831 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9832 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9833 tree
const type
= TREE_TYPE (arg
);
9837 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9838 real_from_string (&r
, buf
);
9839 result
= build_call_expr (isgr_fn
, 2,
9840 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9841 build_real (type
, r
));
9844 CASE_FLT_FN (BUILT_IN_FINITE
):
9845 case BUILT_IN_ISFINITE
:
9847 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9848 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9849 tree
const type
= TREE_TYPE (arg
);
9853 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9854 real_from_string (&r
, buf
);
9855 result
= build_call_expr (isle_fn
, 2,
9856 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9857 build_real (type
, r
));
9858 /*result = fold_build2_loc (loc, UNGT_EXPR,
9859 TREE_TYPE (TREE_TYPE (fndecl)),
9860 fold_build1_loc (loc, ABS_EXPR, type, arg),
9861 build_real (type, r));
9862 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9863 TREE_TYPE (TREE_TYPE (fndecl)),
9867 case BUILT_IN_ISNORMAL
:
9869 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9870 islessequal(fabs(x),DBL_MAX). */
9871 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9872 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9873 tree
const type
= TREE_TYPE (arg
);
9874 REAL_VALUE_TYPE rmax
, rmin
;
9877 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9878 real_from_string (&rmax
, buf
);
9879 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9880 real_from_string (&rmin
, buf
);
9881 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9882 result
= build_call_expr (isle_fn
, 2, arg
,
9883 build_real (type
, rmax
));
9884 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9885 build_call_expr (isge_fn
, 2, arg
,
9886 build_real (type
, rmin
)));
9896 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9897 ARG is the argument for the call. */
9900 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9902 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9905 if (!validate_arg (arg
, REAL_TYPE
))
9908 switch (builtin_index
)
9910 case BUILT_IN_ISINF
:
9911 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9912 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9914 if (TREE_CODE (arg
) == REAL_CST
)
9916 r
= TREE_REAL_CST (arg
);
9917 if (real_isinf (&r
))
9918 return real_compare (GT_EXPR
, &r
, &dconst0
)
9919 ? integer_one_node
: integer_minus_one_node
;
9921 return integer_zero_node
;
9926 case BUILT_IN_ISINF_SIGN
:
9928 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9929 /* In a boolean context, GCC will fold the inner COND_EXPR to
9930 1. So e.g. "if (isinf_sign(x))" would be folded to just
9931 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9932 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9933 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9934 tree tmp
= NULL_TREE
;
9936 arg
= builtin_save_expr (arg
);
9938 if (signbit_fn
&& isinf_fn
)
9940 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9941 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9943 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9944 signbit_call
, integer_zero_node
);
9945 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9946 isinf_call
, integer_zero_node
);
9948 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9949 integer_minus_one_node
, integer_one_node
);
9950 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9958 case BUILT_IN_ISFINITE
:
9959 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9960 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9961 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9963 if (TREE_CODE (arg
) == REAL_CST
)
9965 r
= TREE_REAL_CST (arg
);
9966 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9971 case BUILT_IN_ISNAN
:
9972 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9973 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9975 if (TREE_CODE (arg
) == REAL_CST
)
9977 r
= TREE_REAL_CST (arg
);
9978 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9981 arg
= builtin_save_expr (arg
);
9982 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9989 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9990 This builtin will generate code to return the appropriate floating
9991 point classification depending on the value of the floating point
9992 number passed in. The possible return values must be supplied as
9993 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9994 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9995 one floating point argument which is "type generic". */
9998 fold_builtin_fpclassify (location_t loc
, tree exp
)
10000 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
10001 arg
, type
, res
, tmp
;
10002 enum machine_mode mode
;
10006 /* Verify the required arguments in the original call. */
10007 if (!validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
,
10008 INTEGER_TYPE
, INTEGER_TYPE
,
10009 INTEGER_TYPE
, REAL_TYPE
, VOID_TYPE
))
10012 fp_nan
= CALL_EXPR_ARG (exp
, 0);
10013 fp_infinite
= CALL_EXPR_ARG (exp
, 1);
10014 fp_normal
= CALL_EXPR_ARG (exp
, 2);
10015 fp_subnormal
= CALL_EXPR_ARG (exp
, 3);
10016 fp_zero
= CALL_EXPR_ARG (exp
, 4);
10017 arg
= CALL_EXPR_ARG (exp
, 5);
10018 type
= TREE_TYPE (arg
);
10019 mode
= TYPE_MODE (type
);
10020 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
10022 /* fpclassify(x) ->
10023 isnan(x) ? FP_NAN :
10024 (fabs(x) == Inf ? FP_INFINITE :
10025 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10026 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10028 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10029 build_real (type
, dconst0
));
10030 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
10031 tmp
, fp_zero
, fp_subnormal
);
10033 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
10034 real_from_string (&r
, buf
);
10035 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
10036 arg
, build_real (type
, r
));
10037 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
10039 if (HONOR_INFINITIES (mode
))
10042 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
10043 build_real (type
, r
));
10044 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
10048 if (HONOR_NANS (mode
))
10050 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
10051 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
10057 /* Fold a call to an unordered comparison function such as
10058 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10059 being called and ARG0 and ARG1 are the arguments for the call.
10060 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10061 the opposite of the desired result. UNORDERED_CODE is used
10062 for modes that can hold NaNs and ORDERED_CODE is used for
10066 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
10067 enum tree_code unordered_code
,
10068 enum tree_code ordered_code
)
10070 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10071 enum tree_code code
;
10073 enum tree_code code0
, code1
;
10074 tree cmp_type
= NULL_TREE
;
10076 type0
= TREE_TYPE (arg0
);
10077 type1
= TREE_TYPE (arg1
);
10079 code0
= TREE_CODE (type0
);
10080 code1
= TREE_CODE (type1
);
10082 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
10083 /* Choose the wider of two real types. */
10084 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
10086 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
10088 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
10091 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
10092 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
10094 if (unordered_code
== UNORDERED_EXPR
)
10096 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
10097 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
10098 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
10101 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
10103 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
10104 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
10107 /* Fold a call to built-in function FNDECL with 0 arguments.
10108 IGNORE is true if the result of the function call is ignored. This
10109 function returns NULL_TREE if no simplification was possible. */
10112 fold_builtin_0 (location_t loc
, tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
10114 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10115 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10118 CASE_FLT_FN (BUILT_IN_INF
):
10119 case BUILT_IN_INFD32
:
10120 case BUILT_IN_INFD64
:
10121 case BUILT_IN_INFD128
:
10122 return fold_builtin_inf (loc
, type
, true);
10124 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
10125 return fold_builtin_inf (loc
, type
, false);
10127 case BUILT_IN_CLASSIFY_TYPE
:
10128 return fold_builtin_classify_type (NULL_TREE
);
10130 case BUILT_IN_UNREACHABLE
:
10131 if (flag_sanitize
& SANITIZE_UNREACHABLE
10132 && (current_function_decl
== NULL
10133 || !lookup_attribute ("no_sanitize_undefined",
10134 DECL_ATTRIBUTES (current_function_decl
))))
10135 return ubsan_instrument_unreachable (loc
);
10144 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10145 IGNORE is true if the result of the function call is ignored. This
10146 function returns NULL_TREE if no simplification was possible. */
10149 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
, bool ignore
)
10151 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10152 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10155 case BUILT_IN_CONSTANT_P
:
10157 tree val
= fold_builtin_constant_p (arg0
);
10159 /* Gimplification will pull the CALL_EXPR for the builtin out of
10160 an if condition. When not optimizing, we'll not CSE it back.
10161 To avoid link error types of regressions, return false now. */
10162 if (!val
&& !optimize
)
10163 val
= integer_zero_node
;
10168 case BUILT_IN_CLASSIFY_TYPE
:
10169 return fold_builtin_classify_type (arg0
);
10171 case BUILT_IN_STRLEN
:
10172 return fold_builtin_strlen (loc
, type
, arg0
);
10174 CASE_FLT_FN (BUILT_IN_FABS
):
10175 case BUILT_IN_FABSD32
:
10176 case BUILT_IN_FABSD64
:
10177 case BUILT_IN_FABSD128
:
10178 return fold_builtin_fabs (loc
, arg0
, type
);
10181 case BUILT_IN_LABS
:
10182 case BUILT_IN_LLABS
:
10183 case BUILT_IN_IMAXABS
:
10184 return fold_builtin_abs (loc
, arg0
, type
);
10186 CASE_FLT_FN (BUILT_IN_CONJ
):
10187 if (validate_arg (arg0
, COMPLEX_TYPE
)
10188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10189 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
10192 CASE_FLT_FN (BUILT_IN_CREAL
):
10193 if (validate_arg (arg0
, COMPLEX_TYPE
)
10194 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10195 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));;
10198 CASE_FLT_FN (BUILT_IN_CIMAG
):
10199 if (validate_arg (arg0
, COMPLEX_TYPE
)
10200 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10201 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
10204 CASE_FLT_FN (BUILT_IN_CCOS
):
10205 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ false);
10207 CASE_FLT_FN (BUILT_IN_CCOSH
):
10208 return fold_builtin_ccos (loc
, arg0
, type
, fndecl
, /*hyper=*/ true);
10210 CASE_FLT_FN (BUILT_IN_CPROJ
):
10211 return fold_builtin_cproj (loc
, arg0
, type
);
10213 CASE_FLT_FN (BUILT_IN_CSIN
):
10214 if (validate_arg (arg0
, COMPLEX_TYPE
)
10215 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10216 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
10219 CASE_FLT_FN (BUILT_IN_CSINH
):
10220 if (validate_arg (arg0
, COMPLEX_TYPE
)
10221 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10222 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
10225 CASE_FLT_FN (BUILT_IN_CTAN
):
10226 if (validate_arg (arg0
, COMPLEX_TYPE
)
10227 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10228 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
10231 CASE_FLT_FN (BUILT_IN_CTANH
):
10232 if (validate_arg (arg0
, COMPLEX_TYPE
)
10233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10234 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
10237 CASE_FLT_FN (BUILT_IN_CLOG
):
10238 if (validate_arg (arg0
, COMPLEX_TYPE
)
10239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10240 return do_mpc_arg1 (arg0
, type
, mpc_log
);
10243 CASE_FLT_FN (BUILT_IN_CSQRT
):
10244 if (validate_arg (arg0
, COMPLEX_TYPE
)
10245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10246 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
10249 CASE_FLT_FN (BUILT_IN_CASIN
):
10250 if (validate_arg (arg0
, COMPLEX_TYPE
)
10251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10252 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
10255 CASE_FLT_FN (BUILT_IN_CACOS
):
10256 if (validate_arg (arg0
, COMPLEX_TYPE
)
10257 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10258 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
10261 CASE_FLT_FN (BUILT_IN_CATAN
):
10262 if (validate_arg (arg0
, COMPLEX_TYPE
)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10264 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
10267 CASE_FLT_FN (BUILT_IN_CASINH
):
10268 if (validate_arg (arg0
, COMPLEX_TYPE
)
10269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10270 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
10273 CASE_FLT_FN (BUILT_IN_CACOSH
):
10274 if (validate_arg (arg0
, COMPLEX_TYPE
)
10275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10276 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
10279 CASE_FLT_FN (BUILT_IN_CATANH
):
10280 if (validate_arg (arg0
, COMPLEX_TYPE
)
10281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
10282 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
10285 CASE_FLT_FN (BUILT_IN_CABS
):
10286 return fold_builtin_cabs (loc
, arg0
, type
, fndecl
);
10288 CASE_FLT_FN (BUILT_IN_CARG
):
10289 return fold_builtin_carg (loc
, arg0
, type
);
10291 CASE_FLT_FN (BUILT_IN_SQRT
):
10292 return fold_builtin_sqrt (loc
, arg0
, type
);
10294 CASE_FLT_FN (BUILT_IN_CBRT
):
10295 return fold_builtin_cbrt (loc
, arg0
, type
);
10297 CASE_FLT_FN (BUILT_IN_ASIN
):
10298 if (validate_arg (arg0
, REAL_TYPE
))
10299 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
10300 &dconstm1
, &dconst1
, true);
10303 CASE_FLT_FN (BUILT_IN_ACOS
):
10304 if (validate_arg (arg0
, REAL_TYPE
))
10305 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
10306 &dconstm1
, &dconst1
, true);
10309 CASE_FLT_FN (BUILT_IN_ATAN
):
10310 if (validate_arg (arg0
, REAL_TYPE
))
10311 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
10314 CASE_FLT_FN (BUILT_IN_ASINH
):
10315 if (validate_arg (arg0
, REAL_TYPE
))
10316 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
10319 CASE_FLT_FN (BUILT_IN_ACOSH
):
10320 if (validate_arg (arg0
, REAL_TYPE
))
10321 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
10322 &dconst1
, NULL
, true);
10325 CASE_FLT_FN (BUILT_IN_ATANH
):
10326 if (validate_arg (arg0
, REAL_TYPE
))
10327 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
10328 &dconstm1
, &dconst1
, false);
10331 CASE_FLT_FN (BUILT_IN_SIN
):
10332 if (validate_arg (arg0
, REAL_TYPE
))
10333 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
10336 CASE_FLT_FN (BUILT_IN_COS
):
10337 return fold_builtin_cos (loc
, arg0
, type
, fndecl
);
10339 CASE_FLT_FN (BUILT_IN_TAN
):
10340 return fold_builtin_tan (arg0
, type
);
10342 CASE_FLT_FN (BUILT_IN_CEXP
):
10343 return fold_builtin_cexp (loc
, arg0
, type
);
10345 CASE_FLT_FN (BUILT_IN_CEXPI
):
10346 if (validate_arg (arg0
, REAL_TYPE
))
10347 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
10350 CASE_FLT_FN (BUILT_IN_SINH
):
10351 if (validate_arg (arg0
, REAL_TYPE
))
10352 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
10355 CASE_FLT_FN (BUILT_IN_COSH
):
10356 return fold_builtin_cosh (loc
, arg0
, type
, fndecl
);
10358 CASE_FLT_FN (BUILT_IN_TANH
):
10359 if (validate_arg (arg0
, REAL_TYPE
))
10360 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
10363 CASE_FLT_FN (BUILT_IN_ERF
):
10364 if (validate_arg (arg0
, REAL_TYPE
))
10365 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10368 CASE_FLT_FN (BUILT_IN_ERFC
):
10369 if (validate_arg (arg0
, REAL_TYPE
))
10370 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10373 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10374 if (validate_arg (arg0
, REAL_TYPE
))
10375 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10378 CASE_FLT_FN (BUILT_IN_EXP
):
10379 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10381 CASE_FLT_FN (BUILT_IN_EXP2
):
10382 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10384 CASE_FLT_FN (BUILT_IN_EXP10
):
10385 CASE_FLT_FN (BUILT_IN_POW10
):
10386 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10388 CASE_FLT_FN (BUILT_IN_EXPM1
):
10389 if (validate_arg (arg0
, REAL_TYPE
))
10390 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10393 CASE_FLT_FN (BUILT_IN_LOG
):
10394 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log
);
10396 CASE_FLT_FN (BUILT_IN_LOG2
):
10397 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log2
);
10399 CASE_FLT_FN (BUILT_IN_LOG10
):
10400 return fold_builtin_logarithm (loc
, fndecl
, arg0
, mpfr_log10
);
10402 CASE_FLT_FN (BUILT_IN_LOG1P
):
10403 if (validate_arg (arg0
, REAL_TYPE
))
10404 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10405 &dconstm1
, NULL
, false);
10408 CASE_FLT_FN (BUILT_IN_J0
):
10409 if (validate_arg (arg0
, REAL_TYPE
))
10410 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10414 CASE_FLT_FN (BUILT_IN_J1
):
10415 if (validate_arg (arg0
, REAL_TYPE
))
10416 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10420 CASE_FLT_FN (BUILT_IN_Y0
):
10421 if (validate_arg (arg0
, REAL_TYPE
))
10422 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10423 &dconst0
, NULL
, false);
10426 CASE_FLT_FN (BUILT_IN_Y1
):
10427 if (validate_arg (arg0
, REAL_TYPE
))
10428 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10429 &dconst0
, NULL
, false);
10432 CASE_FLT_FN (BUILT_IN_NAN
):
10433 case BUILT_IN_NAND32
:
10434 case BUILT_IN_NAND64
:
10435 case BUILT_IN_NAND128
:
10436 return fold_builtin_nan (arg0
, type
, true);
10438 CASE_FLT_FN (BUILT_IN_NANS
):
10439 return fold_builtin_nan (arg0
, type
, false);
10441 CASE_FLT_FN (BUILT_IN_FLOOR
):
10442 return fold_builtin_floor (loc
, fndecl
, arg0
);
10444 CASE_FLT_FN (BUILT_IN_CEIL
):
10445 return fold_builtin_ceil (loc
, fndecl
, arg0
);
10447 CASE_FLT_FN (BUILT_IN_TRUNC
):
10448 return fold_builtin_trunc (loc
, fndecl
, arg0
);
10450 CASE_FLT_FN (BUILT_IN_ROUND
):
10451 return fold_builtin_round (loc
, fndecl
, arg0
);
10453 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10454 CASE_FLT_FN (BUILT_IN_RINT
):
10455 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
10457 CASE_FLT_FN (BUILT_IN_ICEIL
):
10458 CASE_FLT_FN (BUILT_IN_LCEIL
):
10459 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10460 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10461 CASE_FLT_FN (BUILT_IN_IFLOOR
):
10462 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10463 CASE_FLT_FN (BUILT_IN_IROUND
):
10464 CASE_FLT_FN (BUILT_IN_LROUND
):
10465 CASE_FLT_FN (BUILT_IN_LLROUND
):
10466 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
10468 CASE_FLT_FN (BUILT_IN_IRINT
):
10469 CASE_FLT_FN (BUILT_IN_LRINT
):
10470 CASE_FLT_FN (BUILT_IN_LLRINT
):
10471 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
10473 case BUILT_IN_BSWAP16
:
10474 case BUILT_IN_BSWAP32
:
10475 case BUILT_IN_BSWAP64
:
10476 return fold_builtin_bswap (fndecl
, arg0
);
10478 CASE_INT_FN (BUILT_IN_FFS
):
10479 CASE_INT_FN (BUILT_IN_CLZ
):
10480 CASE_INT_FN (BUILT_IN_CTZ
):
10481 CASE_INT_FN (BUILT_IN_CLRSB
):
10482 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10483 CASE_INT_FN (BUILT_IN_PARITY
):
10484 return fold_builtin_bitop (fndecl
, arg0
);
10486 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10487 return fold_builtin_signbit (loc
, arg0
, type
);
10489 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10490 return fold_builtin_significand (loc
, arg0
, type
);
10492 CASE_FLT_FN (BUILT_IN_ILOGB
):
10493 CASE_FLT_FN (BUILT_IN_LOGB
):
10494 return fold_builtin_logb (loc
, arg0
, type
);
10496 case BUILT_IN_ISASCII
:
10497 return fold_builtin_isascii (loc
, arg0
);
10499 case BUILT_IN_TOASCII
:
10500 return fold_builtin_toascii (loc
, arg0
);
10502 case BUILT_IN_ISDIGIT
:
10503 return fold_builtin_isdigit (loc
, arg0
);
10505 CASE_FLT_FN (BUILT_IN_FINITE
):
10506 case BUILT_IN_FINITED32
:
10507 case BUILT_IN_FINITED64
:
10508 case BUILT_IN_FINITED128
:
10509 case BUILT_IN_ISFINITE
:
10511 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10514 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10517 CASE_FLT_FN (BUILT_IN_ISINF
):
10518 case BUILT_IN_ISINFD32
:
10519 case BUILT_IN_ISINFD64
:
10520 case BUILT_IN_ISINFD128
:
10522 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10525 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10528 case BUILT_IN_ISNORMAL
:
10529 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10531 case BUILT_IN_ISINF_SIGN
:
10532 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10534 CASE_FLT_FN (BUILT_IN_ISNAN
):
10535 case BUILT_IN_ISNAND32
:
10536 case BUILT_IN_ISNAND64
:
10537 case BUILT_IN_ISNAND128
:
10538 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10540 case BUILT_IN_PRINTF
:
10541 case BUILT_IN_PRINTF_UNLOCKED
:
10542 case BUILT_IN_VPRINTF
:
10543 return fold_builtin_printf (loc
, fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10545 case BUILT_IN_FREE
:
10546 if (integer_zerop (arg0
))
10547 return build_empty_stmt (loc
);
10558 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10559 IGNORE is true if the result of the function call is ignored. This
10560 function returns NULL_TREE if no simplification was possible. */
10563 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10565 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10566 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10570 CASE_FLT_FN (BUILT_IN_JN
):
10571 if (validate_arg (arg0
, INTEGER_TYPE
)
10572 && validate_arg (arg1
, REAL_TYPE
))
10573 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10576 CASE_FLT_FN (BUILT_IN_YN
):
10577 if (validate_arg (arg0
, INTEGER_TYPE
)
10578 && validate_arg (arg1
, REAL_TYPE
))
10579 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10583 CASE_FLT_FN (BUILT_IN_DREM
):
10584 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10585 if (validate_arg (arg0
, REAL_TYPE
)
10586 && validate_arg (arg1
, REAL_TYPE
))
10587 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10590 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10591 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10592 if (validate_arg (arg0
, REAL_TYPE
)
10593 && validate_arg (arg1
, POINTER_TYPE
))
10594 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10597 CASE_FLT_FN (BUILT_IN_ATAN2
):
10598 if (validate_arg (arg0
, REAL_TYPE
)
10599 && validate_arg (arg1
, REAL_TYPE
))
10600 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10603 CASE_FLT_FN (BUILT_IN_FDIM
):
10604 if (validate_arg (arg0
, REAL_TYPE
)
10605 && validate_arg (arg1
, REAL_TYPE
))
10606 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10609 CASE_FLT_FN (BUILT_IN_HYPOT
):
10610 return fold_builtin_hypot (loc
, fndecl
, arg0
, arg1
, type
);
10612 CASE_FLT_FN (BUILT_IN_CPOW
):
10613 if (validate_arg (arg0
, COMPLEX_TYPE
)
10614 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10615 && validate_arg (arg1
, COMPLEX_TYPE
)
10616 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
10617 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
10620 CASE_FLT_FN (BUILT_IN_LDEXP
):
10621 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
10622 CASE_FLT_FN (BUILT_IN_SCALBN
):
10623 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10624 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
10625 type
, /*ldexp=*/false);
10627 CASE_FLT_FN (BUILT_IN_FREXP
):
10628 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
10630 CASE_FLT_FN (BUILT_IN_MODF
):
10631 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
10633 case BUILT_IN_BZERO
:
10634 return fold_builtin_bzero (loc
, arg0
, arg1
, ignore
);
10636 case BUILT_IN_FPUTS
:
10637 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, false, NULL_TREE
);
10639 case BUILT_IN_FPUTS_UNLOCKED
:
10640 return fold_builtin_fputs (loc
, arg0
, arg1
, ignore
, true, NULL_TREE
);
10642 case BUILT_IN_STRSTR
:
10643 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
10645 case BUILT_IN_STRCAT
:
10646 return fold_builtin_strcat (loc
, arg0
, arg1
);
10648 case BUILT_IN_STRSPN
:
10649 return fold_builtin_strspn (loc
, arg0
, arg1
);
10651 case BUILT_IN_STRCSPN
:
10652 return fold_builtin_strcspn (loc
, arg0
, arg1
);
10654 case BUILT_IN_STRCHR
:
10655 case BUILT_IN_INDEX
:
10656 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
10658 case BUILT_IN_STRRCHR
:
10659 case BUILT_IN_RINDEX
:
10660 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
10662 case BUILT_IN_STRCPY
:
10663 return fold_builtin_strcpy (loc
, fndecl
, arg0
, arg1
, NULL_TREE
);
10665 case BUILT_IN_STPCPY
:
10668 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
10672 return build_call_expr_loc (loc
, fn
, 2, arg0
, arg1
);
10675 return fold_builtin_stpcpy (loc
, fndecl
, arg0
, arg1
);
10678 case BUILT_IN_STRCMP
:
10679 return fold_builtin_strcmp (loc
, arg0
, arg1
);
10681 case BUILT_IN_STRPBRK
:
10682 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
10684 case BUILT_IN_EXPECT
:
10685 return fold_builtin_expect (loc
, arg0
, arg1
);
10687 CASE_FLT_FN (BUILT_IN_POW
):
10688 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
10690 CASE_FLT_FN (BUILT_IN_POWI
):
10691 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
10693 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10694 return fold_builtin_copysign (loc
, fndecl
, arg0
, arg1
, type
);
10696 CASE_FLT_FN (BUILT_IN_FMIN
):
10697 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
10699 CASE_FLT_FN (BUILT_IN_FMAX
):
10700 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
10702 case BUILT_IN_ISGREATER
:
10703 return fold_builtin_unordered_cmp (loc
, fndecl
,
10704 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10705 case BUILT_IN_ISGREATEREQUAL
:
10706 return fold_builtin_unordered_cmp (loc
, fndecl
,
10707 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10708 case BUILT_IN_ISLESS
:
10709 return fold_builtin_unordered_cmp (loc
, fndecl
,
10710 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10711 case BUILT_IN_ISLESSEQUAL
:
10712 return fold_builtin_unordered_cmp (loc
, fndecl
,
10713 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10714 case BUILT_IN_ISLESSGREATER
:
10715 return fold_builtin_unordered_cmp (loc
, fndecl
,
10716 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10717 case BUILT_IN_ISUNORDERED
:
10718 return fold_builtin_unordered_cmp (loc
, fndecl
,
10719 arg0
, arg1
, UNORDERED_EXPR
,
10722 /* We do the folding for va_start in the expander. */
10723 case BUILT_IN_VA_START
:
10726 case BUILT_IN_SPRINTF
:
10727 return fold_builtin_sprintf (loc
, arg0
, arg1
, NULL_TREE
, ignore
);
10729 case BUILT_IN_OBJECT_SIZE
:
10730 return fold_builtin_object_size (arg0
, arg1
);
10732 case BUILT_IN_PRINTF
:
10733 case BUILT_IN_PRINTF_UNLOCKED
:
10734 case BUILT_IN_VPRINTF
:
10735 return fold_builtin_printf (loc
, fndecl
, arg0
, arg1
, ignore
, fcode
);
10737 case BUILT_IN_PRINTF_CHK
:
10738 case BUILT_IN_VPRINTF_CHK
:
10739 if (!validate_arg (arg0
, INTEGER_TYPE
)
10740 || TREE_SIDE_EFFECTS (arg0
))
10743 return fold_builtin_printf (loc
, fndecl
,
10744 arg1
, NULL_TREE
, ignore
, fcode
);
10747 case BUILT_IN_FPRINTF
:
10748 case BUILT_IN_FPRINTF_UNLOCKED
:
10749 case BUILT_IN_VFPRINTF
:
10750 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, NULL_TREE
,
10753 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10754 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10756 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10757 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10765 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10766 and ARG2. IGNORE is true if the result of the function call is ignored.
10767 This function returns NULL_TREE if no simplification was possible. */
10770 fold_builtin_3 (location_t loc
, tree fndecl
,
10771 tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10773 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10774 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10778 CASE_FLT_FN (BUILT_IN_SINCOS
):
10779 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10781 CASE_FLT_FN (BUILT_IN_FMA
):
10782 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10785 CASE_FLT_FN (BUILT_IN_REMQUO
):
10786 if (validate_arg (arg0
, REAL_TYPE
)
10787 && validate_arg (arg1
, REAL_TYPE
)
10788 && validate_arg (arg2
, POINTER_TYPE
))
10789 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10792 case BUILT_IN_MEMSET
:
10793 return fold_builtin_memset (loc
, arg0
, arg1
, arg2
, type
, ignore
);
10795 case BUILT_IN_BCOPY
:
10796 return fold_builtin_memory_op (loc
, arg1
, arg0
, arg2
,
10797 void_type_node
, true, /*endp=*/3);
10799 case BUILT_IN_MEMCPY
:
10800 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10801 type
, ignore
, /*endp=*/0);
10803 case BUILT_IN_MEMPCPY
:
10804 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10805 type
, ignore
, /*endp=*/1);
10807 case BUILT_IN_MEMMOVE
:
10808 return fold_builtin_memory_op (loc
, arg0
, arg1
, arg2
,
10809 type
, ignore
, /*endp=*/3);
10811 case BUILT_IN_STRNCAT
:
10812 return fold_builtin_strncat (loc
, arg0
, arg1
, arg2
);
10814 case BUILT_IN_STRNCPY
:
10815 return fold_builtin_strncpy (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10817 case BUILT_IN_STRNCMP
:
10818 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10820 case BUILT_IN_MEMCHR
:
10821 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10823 case BUILT_IN_BCMP
:
10824 case BUILT_IN_MEMCMP
:
10825 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10827 case BUILT_IN_SPRINTF
:
10828 return fold_builtin_sprintf (loc
, arg0
, arg1
, arg2
, ignore
);
10830 case BUILT_IN_SNPRINTF
:
10831 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, NULL_TREE
, ignore
);
10833 case BUILT_IN_STRCPY_CHK
:
10834 case BUILT_IN_STPCPY_CHK
:
10835 return fold_builtin_stxcpy_chk (loc
, fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10838 case BUILT_IN_STRCAT_CHK
:
10839 return fold_builtin_strcat_chk (loc
, fndecl
, arg0
, arg1
, arg2
);
10841 case BUILT_IN_PRINTF_CHK
:
10842 case BUILT_IN_VPRINTF_CHK
:
10843 if (!validate_arg (arg0
, INTEGER_TYPE
)
10844 || TREE_SIDE_EFFECTS (arg0
))
10847 return fold_builtin_printf (loc
, fndecl
, arg1
, arg2
, ignore
, fcode
);
10850 case BUILT_IN_FPRINTF
:
10851 case BUILT_IN_FPRINTF_UNLOCKED
:
10852 case BUILT_IN_VFPRINTF
:
10853 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg1
, arg2
,
10856 case BUILT_IN_FPRINTF_CHK
:
10857 case BUILT_IN_VFPRINTF_CHK
:
10858 if (!validate_arg (arg1
, INTEGER_TYPE
)
10859 || TREE_SIDE_EFFECTS (arg1
))
10862 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, NULL_TREE
,
10871 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10872 ARG2, and ARG3. IGNORE is true if the result of the function call is
10873 ignored. This function returns NULL_TREE if no simplification was
10877 fold_builtin_4 (location_t loc
, tree fndecl
,
10878 tree arg0
, tree arg1
, tree arg2
, tree arg3
, bool ignore
)
10880 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10884 case BUILT_IN_MEMCPY_CHK
:
10885 case BUILT_IN_MEMPCPY_CHK
:
10886 case BUILT_IN_MEMMOVE_CHK
:
10887 case BUILT_IN_MEMSET_CHK
:
10888 return fold_builtin_memory_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
,
10890 DECL_FUNCTION_CODE (fndecl
));
10892 case BUILT_IN_STRNCPY_CHK
:
10893 case BUILT_IN_STPNCPY_CHK
:
10894 return fold_builtin_stxncpy_chk (loc
, arg0
, arg1
, arg2
, arg3
, NULL_TREE
,
10897 case BUILT_IN_STRNCAT_CHK
:
10898 return fold_builtin_strncat_chk (loc
, fndecl
, arg0
, arg1
, arg2
, arg3
);
10900 case BUILT_IN_SNPRINTF
:
10901 return fold_builtin_snprintf (loc
, arg0
, arg1
, arg2
, arg3
, ignore
);
10903 case BUILT_IN_FPRINTF_CHK
:
10904 case BUILT_IN_VFPRINTF_CHK
:
10905 if (!validate_arg (arg1
, INTEGER_TYPE
)
10906 || TREE_SIDE_EFFECTS (arg1
))
10909 return fold_builtin_fprintf (loc
, fndecl
, arg0
, arg2
, arg3
,
10919 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10920 arguments, where NARGS <= 4. IGNORE is true if the result of the
10921 function call is ignored. This function returns NULL_TREE if no
10922 simplification was possible. Note that this only folds builtins with
10923 fixed argument patterns. Foldings that do varargs-to-varargs
10924 transformations, or that match calls with more than 4 arguments,
10925 need to be handled with fold_builtin_varargs instead. */
10927 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10930 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10932 tree ret
= NULL_TREE
;
10937 ret
= fold_builtin_0 (loc
, fndecl
, ignore
);
10940 ret
= fold_builtin_1 (loc
, fndecl
, args
[0], ignore
);
10943 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1], ignore
);
10946 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2], ignore
);
10949 ret
= fold_builtin_4 (loc
, fndecl
, args
[0], args
[1], args
[2], args
[3],
10957 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10958 SET_EXPR_LOCATION (ret
, loc
);
10959 TREE_NO_WARNING (ret
) = 1;
10965 /* Builtins with folding operations that operate on "..." arguments
10966 need special handling; we need to store the arguments in a convenient
10967 data structure before attempting any folding. Fortunately there are
10968 only a few builtins that fall into this category. FNDECL is the
10969 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10970 result of the function call is ignored. */
10973 fold_builtin_varargs (location_t loc
, tree fndecl
, tree exp
,
10974 bool ignore ATTRIBUTE_UNUSED
)
10976 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10977 tree ret
= NULL_TREE
;
10981 case BUILT_IN_SPRINTF_CHK
:
10982 case BUILT_IN_VSPRINTF_CHK
:
10983 ret
= fold_builtin_sprintf_chk (loc
, exp
, fcode
);
10986 case BUILT_IN_SNPRINTF_CHK
:
10987 case BUILT_IN_VSNPRINTF_CHK
:
10988 ret
= fold_builtin_snprintf_chk (loc
, exp
, NULL_TREE
, fcode
);
10991 case BUILT_IN_FPCLASSIFY
:
10992 ret
= fold_builtin_fpclassify (loc
, exp
);
11000 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11001 SET_EXPR_LOCATION (ret
, loc
);
11002 TREE_NO_WARNING (ret
) = 1;
11008 /* Return true if FNDECL shouldn't be folded right now.
11009 If a built-in function has an inline attribute always_inline
11010 wrapper, defer folding it after always_inline functions have
11011 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11012 might not be performed. */
11015 avoid_folding_inline_builtin (tree fndecl
)
11017 return (DECL_DECLARED_INLINE_P (fndecl
)
11018 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
11020 && !cfun
->always_inline_functions_inlined
11021 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
11024 /* A wrapper function for builtin folding that prevents warnings for
11025 "statement without effect" and the like, caused by removing the
11026 call node earlier than the warning is generated. */
11029 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
11031 tree ret
= NULL_TREE
;
11032 tree fndecl
= get_callee_fndecl (exp
);
11034 && TREE_CODE (fndecl
) == FUNCTION_DECL
11035 && DECL_BUILT_IN (fndecl
)
11036 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11037 yet. Defer folding until we see all the arguments
11038 (after inlining). */
11039 && !CALL_EXPR_VA_ARG_PACK (exp
))
11041 int nargs
= call_expr_nargs (exp
);
11043 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11044 instead last argument is __builtin_va_arg_pack (). Defer folding
11045 even in that case, until arguments are finalized. */
11046 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
11048 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
11050 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11051 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11052 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11056 if (avoid_folding_inline_builtin (fndecl
))
11059 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11060 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
11061 CALL_EXPR_ARGP (exp
), ignore
);
11064 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11066 tree
*args
= CALL_EXPR_ARGP (exp
);
11067 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11070 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, ignore
);
11078 /* Conveniently construct a function call expression. FNDECL names the
11079 function to be called and N arguments are passed in the array
11083 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11085 tree fntype
= TREE_TYPE (fndecl
);
11086 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11088 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11091 /* Conveniently construct a function call expression. FNDECL names the
11092 function to be called and the arguments are passed in the vector
11096 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
11098 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
11099 vec_safe_address (vec
));
11103 /* Conveniently construct a function call expression. FNDECL names the
11104 function to be called, N is the number of arguments, and the "..."
11105 parameters are the argument expressions. */
11108 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11111 tree
*argarray
= XALLOCAVEC (tree
, n
);
11115 for (i
= 0; i
< n
; i
++)
11116 argarray
[i
] = va_arg (ap
, tree
);
11118 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11121 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11122 varargs macros aren't supported by all bootstrap compilers. */
11125 build_call_expr (tree fndecl
, int n
, ...)
11128 tree
*argarray
= XALLOCAVEC (tree
, n
);
11132 for (i
= 0; i
< n
; i
++)
11133 argarray
[i
] = va_arg (ap
, tree
);
11135 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11138 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11139 N arguments are passed in the array ARGARRAY. */
11142 fold_builtin_call_array (location_t loc
, tree type
,
11147 tree ret
= NULL_TREE
;
11150 if (TREE_CODE (fn
) == ADDR_EXPR
)
11152 tree fndecl
= TREE_OPERAND (fn
, 0);
11153 if (TREE_CODE (fndecl
) == FUNCTION_DECL
11154 && DECL_BUILT_IN (fndecl
))
11156 /* If last argument is __builtin_va_arg_pack (), arguments to this
11157 function are not finalized yet. Defer folding until they are. */
11158 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
11160 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
11162 && TREE_CODE (fndecl2
) == FUNCTION_DECL
11163 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
11164 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
11165 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11167 if (avoid_folding_inline_builtin (fndecl
))
11168 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11169 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11171 ret
= targetm
.fold_builtin (fndecl
, n
, argarray
, false);
11175 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11177 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
11179 /* First try the transformations that don't require consing up
11181 ret
= fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
11186 /* If we got this far, we need to build an exp. */
11187 exp
= build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11188 ret
= fold_builtin_varargs (loc
, fndecl
, exp
, false);
11189 return ret
? ret
: exp
;
11193 return build_call_array_loc (loc
, type
, fn
, n
, argarray
);
11196 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11197 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11198 of arguments in ARGS to be omitted. OLDNARGS is the number of
11199 elements in ARGS. */
11202 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
11203 int skip
, tree fndecl
, int n
, va_list newargs
)
11205 int nargs
= oldnargs
- skip
+ n
;
11212 buffer
= XALLOCAVEC (tree
, nargs
);
11213 for (i
= 0; i
< n
; i
++)
11214 buffer
[i
] = va_arg (newargs
, tree
);
11215 for (j
= skip
; j
< oldnargs
; j
++, i
++)
11216 buffer
[i
] = args
[j
];
11219 buffer
= args
+ skip
;
11221 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
11224 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11225 list ARGS along with N new arguments specified as the "..."
11226 parameters. SKIP is the number of arguments in ARGS to be omitted.
11227 OLDNARGS is the number of elements in ARGS. */
11230 rewrite_call_expr_array (location_t loc
, int oldnargs
, tree
*args
,
11231 int skip
, tree fndecl
, int n
, ...)
11237 t
= rewrite_call_expr_valist (loc
, oldnargs
, args
, skip
, fndecl
, n
, ap
);
11243 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11244 along with N new arguments specified as the "..." parameters. SKIP
11245 is the number of arguments in EXP to be omitted. This function is used
11246 to do varargs-to-varargs transformations. */
11249 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
11255 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
11256 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
11262 /* Validate a single argument ARG against a tree code CODE representing
11266 validate_arg (const_tree arg
, enum tree_code code
)
11270 else if (code
== POINTER_TYPE
)
11271 return POINTER_TYPE_P (TREE_TYPE (arg
));
11272 else if (code
== INTEGER_TYPE
)
11273 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
11274 return code
== TREE_CODE (TREE_TYPE (arg
));
11277 /* This function validates the types of a function call argument list
11278 against a specified list of tree_codes. If the last specifier is a 0,
11279 that represents an ellipses, otherwise the last specifier must be a
11282 This is the GIMPLE version of validate_arglist. Eventually we want to
11283 completely convert builtins.c to work from GIMPLEs and the tree based
11284 validate_arglist will then be removed. */
11287 validate_gimple_arglist (const_gimple call
, ...)
11289 enum tree_code code
;
11295 va_start (ap
, call
);
11300 code
= (enum tree_code
) va_arg (ap
, int);
11304 /* This signifies an ellipses, any further arguments are all ok. */
11308 /* This signifies an endlink, if no arguments remain, return
11309 true, otherwise return false. */
11310 res
= (i
== gimple_call_num_args (call
));
11313 /* If no parameters remain or the parameter's code does not
11314 match the specified code, return false. Otherwise continue
11315 checking any remaining arguments. */
11316 arg
= gimple_call_arg (call
, i
++);
11317 if (!validate_arg (arg
, code
))
11324 /* We need gotos here since we can only have one VA_CLOSE in a
11332 /* This function validates the types of a function call argument list
11333 against a specified list of tree_codes. If the last specifier is a 0,
11334 that represents an ellipses, otherwise the last specifier must be a
11338 validate_arglist (const_tree callexpr
, ...)
11340 enum tree_code code
;
11343 const_call_expr_arg_iterator iter
;
11346 va_start (ap
, callexpr
);
11347 init_const_call_expr_arg_iterator (callexpr
, &iter
);
11351 code
= (enum tree_code
) va_arg (ap
, int);
11355 /* This signifies an ellipses, any further arguments are all ok. */
11359 /* This signifies an endlink, if no arguments remain, return
11360 true, otherwise return false. */
11361 res
= !more_const_call_expr_args_p (&iter
);
11364 /* If no parameters remain or the parameter's code does not
11365 match the specified code, return false. Otherwise continue
11366 checking any remaining arguments. */
11367 arg
= next_const_call_expr_arg (&iter
);
11368 if (!validate_arg (arg
, code
))
11375 /* We need gotos here since we can only have one VA_CLOSE in a
11383 /* Default target-specific builtin expander that does nothing. */
11386 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
11387 rtx target ATTRIBUTE_UNUSED
,
11388 rtx subtarget ATTRIBUTE_UNUSED
,
11389 enum machine_mode mode ATTRIBUTE_UNUSED
,
11390 int ignore ATTRIBUTE_UNUSED
)
11395 /* Returns true is EXP represents data that would potentially reside
11396 in a readonly section. */
11399 readonly_data_expr (tree exp
)
11403 if (TREE_CODE (exp
) != ADDR_EXPR
)
11406 exp
= get_base_address (TREE_OPERAND (exp
, 0));
11410 /* Make sure we call decl_readonly_section only for trees it
11411 can handle (since it returns true for everything it doesn't
11413 if (TREE_CODE (exp
) == STRING_CST
11414 || TREE_CODE (exp
) == CONSTRUCTOR
11415 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
11416 return decl_readonly_section (exp
, 0);
11421 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11422 to the call, and TYPE is its return type.
11424 Return NULL_TREE if no simplification was possible, otherwise return the
11425 simplified form of the call as a tree.
11427 The simplified form may be a constant or other expression which
11428 computes the same value, but in a more efficient manner (including
11429 calls to other builtin functions).
11431 The call may contain arguments which need to be evaluated, but
11432 which are not useful to determine the result of the call. In
11433 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11434 COMPOUND_EXPR will be an argument which must be evaluated.
11435 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11436 COMPOUND_EXPR in the chain will contain the tree for the simplified
11437 form of the builtin function call. */
11440 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
11442 if (!validate_arg (s1
, POINTER_TYPE
)
11443 || !validate_arg (s2
, POINTER_TYPE
))
11448 const char *p1
, *p2
;
11450 p2
= c_getstr (s2
);
11454 p1
= c_getstr (s1
);
11457 const char *r
= strstr (p1
, p2
);
11461 return build_int_cst (TREE_TYPE (s1
), 0);
11463 /* Return an offset into the constant string argument. */
11464 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11465 return fold_convert_loc (loc
, type
, tem
);
11468 /* The argument is const char *, and the result is char *, so we need
11469 a type conversion here to avoid a warning. */
11471 return fold_convert_loc (loc
, type
, s1
);
11476 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11480 /* New argument list transforming strstr(s1, s2) to
11481 strchr(s1, s2[0]). */
11482 return build_call_expr_loc (loc
, fn
, 2, s1
,
11483 build_int_cst (integer_type_node
, p2
[0]));
11487 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11488 the call, and TYPE is its return type.
11490 Return NULL_TREE if no simplification was possible, otherwise return the
11491 simplified form of the call as a tree.
11493 The simplified form may be a constant or other expression which
11494 computes the same value, but in a more efficient manner (including
11495 calls to other builtin functions).
11497 The call may contain arguments which need to be evaluated, but
11498 which are not useful to determine the result of the call. In
11499 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11500 COMPOUND_EXPR will be an argument which must be evaluated.
11501 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11502 COMPOUND_EXPR in the chain will contain the tree for the simplified
11503 form of the builtin function call. */
11506 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
11508 if (!validate_arg (s1
, POINTER_TYPE
)
11509 || !validate_arg (s2
, INTEGER_TYPE
))
11515 if (TREE_CODE (s2
) != INTEGER_CST
)
11518 p1
= c_getstr (s1
);
11525 if (target_char_cast (s2
, &c
))
11528 r
= strchr (p1
, c
);
11531 return build_int_cst (TREE_TYPE (s1
), 0);
11533 /* Return an offset into the constant string argument. */
11534 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11535 return fold_convert_loc (loc
, type
, tem
);
11541 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11542 the call, and TYPE is its return type.
11544 Return NULL_TREE if no simplification was possible, otherwise return the
11545 simplified form of the call as a tree.
11547 The simplified form may be a constant or other expression which
11548 computes the same value, but in a more efficient manner (including
11549 calls to other builtin functions).
11551 The call may contain arguments which need to be evaluated, but
11552 which are not useful to determine the result of the call. In
11553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11554 COMPOUND_EXPR will be an argument which must be evaluated.
11555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11556 COMPOUND_EXPR in the chain will contain the tree for the simplified
11557 form of the builtin function call. */
11560 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
11562 if (!validate_arg (s1
, POINTER_TYPE
)
11563 || !validate_arg (s2
, INTEGER_TYPE
))
11570 if (TREE_CODE (s2
) != INTEGER_CST
)
11573 p1
= c_getstr (s1
);
11580 if (target_char_cast (s2
, &c
))
11583 r
= strrchr (p1
, c
);
11586 return build_int_cst (TREE_TYPE (s1
), 0);
11588 /* Return an offset into the constant string argument. */
11589 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11590 return fold_convert_loc (loc
, type
, tem
);
11593 if (! integer_zerop (s2
))
11596 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11600 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11601 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
11605 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11606 to the call, and TYPE is its return type.
11608 Return NULL_TREE if no simplification was possible, otherwise return the
11609 simplified form of the call as a tree.
11611 The simplified form may be a constant or other expression which
11612 computes the same value, but in a more efficient manner (including
11613 calls to other builtin functions).
11615 The call may contain arguments which need to be evaluated, but
11616 which are not useful to determine the result of the call. In
11617 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11618 COMPOUND_EXPR will be an argument which must be evaluated.
11619 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11620 COMPOUND_EXPR in the chain will contain the tree for the simplified
11621 form of the builtin function call. */
11624 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
11626 if (!validate_arg (s1
, POINTER_TYPE
)
11627 || !validate_arg (s2
, POINTER_TYPE
))
11632 const char *p1
, *p2
;
11634 p2
= c_getstr (s2
);
11638 p1
= c_getstr (s1
);
11641 const char *r
= strpbrk (p1
, p2
);
11645 return build_int_cst (TREE_TYPE (s1
), 0);
11647 /* Return an offset into the constant string argument. */
11648 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
11649 return fold_convert_loc (loc
, type
, tem
);
11653 /* strpbrk(x, "") == NULL.
11654 Evaluate and ignore s1 in case it had side-effects. */
11655 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
11658 return NULL_TREE
; /* Really call strpbrk. */
11660 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
11664 /* New argument list transforming strpbrk(s1, s2) to
11665 strchr(s1, s2[0]). */
11666 return build_call_expr_loc (loc
, fn
, 2, s1
,
11667 build_int_cst (integer_type_node
, p2
[0]));
11671 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11674 Return NULL_TREE if no simplification was possible, otherwise return the
11675 simplified form of the call as a tree.
11677 The simplified form may be a constant or other expression which
11678 computes the same value, but in a more efficient manner (including
11679 calls to other builtin functions).
11681 The call may contain arguments which need to be evaluated, but
11682 which are not useful to determine the result of the call. In
11683 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11684 COMPOUND_EXPR will be an argument which must be evaluated.
11685 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11686 COMPOUND_EXPR in the chain will contain the tree for the simplified
11687 form of the builtin function call. */
11690 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED
, tree dst
, tree src
)
11692 if (!validate_arg (dst
, POINTER_TYPE
)
11693 || !validate_arg (src
, POINTER_TYPE
))
11697 const char *p
= c_getstr (src
);
11699 /* If the string length is zero, return the dst parameter. */
11700 if (p
&& *p
== '\0')
11703 if (optimize_insn_for_speed_p ())
11705 /* See if we can store by pieces into (dst + strlen(dst)). */
11707 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11708 tree strcpy_fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
11710 if (!strlen_fn
|| !strcpy_fn
)
11713 /* If we don't have a movstr we don't want to emit an strcpy
11714 call. We have to do that if the length of the source string
11715 isn't computable (in that case we can use memcpy probably
11716 later expanding to a sequence of mov instructions). If we
11717 have movstr instructions we can emit strcpy calls. */
11720 tree len
= c_strlen (src
, 1);
11721 if (! len
|| TREE_SIDE_EFFECTS (len
))
11725 /* Stabilize the argument list. */
11726 dst
= builtin_save_expr (dst
);
11728 /* Create strlen (dst). */
11729 newdst
= build_call_expr_loc (loc
, strlen_fn
, 1, dst
);
11730 /* Create (dst p+ strlen (dst)). */
11732 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
11733 newdst
= builtin_save_expr (newdst
);
11735 call
= build_call_expr_loc (loc
, strcpy_fn
, 2, newdst
, src
);
11736 return build2 (COMPOUND_EXPR
, TREE_TYPE (dst
), call
, dst
);
11742 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11743 arguments to the call.
11745 Return NULL_TREE if no simplification was possible, otherwise return the
11746 simplified form of the call as a tree.
11748 The simplified form may be a constant or other expression which
11749 computes the same value, but in a more efficient manner (including
11750 calls to other builtin functions).
11752 The call may contain arguments which need to be evaluated, but
11753 which are not useful to determine the result of the call. In
11754 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11755 COMPOUND_EXPR will be an argument which must be evaluated.
11756 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11757 COMPOUND_EXPR in the chain will contain the tree for the simplified
11758 form of the builtin function call. */
11761 fold_builtin_strncat (location_t loc
, tree dst
, tree src
, tree len
)
11763 if (!validate_arg (dst
, POINTER_TYPE
)
11764 || !validate_arg (src
, POINTER_TYPE
)
11765 || !validate_arg (len
, INTEGER_TYPE
))
11769 const char *p
= c_getstr (src
);
11771 /* If the requested length is zero, or the src parameter string
11772 length is zero, return the dst parameter. */
11773 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11774 return omit_two_operands_loc (loc
, TREE_TYPE (dst
), dst
, src
, len
);
11776 /* If the requested len is greater than or equal to the string
11777 length, call strcat. */
11778 if (TREE_CODE (len
) == INTEGER_CST
&& p
11779 && compare_tree_int (len
, strlen (p
)) >= 0)
11781 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
11783 /* If the replacement _DECL isn't initialized, don't do the
11788 return build_call_expr_loc (loc
, fn
, 2, dst
, src
);
11794 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11797 Return NULL_TREE if no simplification was possible, otherwise return the
11798 simplified form of the call as a tree.
11800 The simplified form may be a constant or other expression which
11801 computes the same value, but in a more efficient manner (including
11802 calls to other builtin functions).
11804 The call may contain arguments which need to be evaluated, but
11805 which are not useful to determine the result of the call. In
11806 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11807 COMPOUND_EXPR will be an argument which must be evaluated.
11808 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11809 COMPOUND_EXPR in the chain will contain the tree for the simplified
11810 form of the builtin function call. */
11813 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
11815 if (!validate_arg (s1
, POINTER_TYPE
)
11816 || !validate_arg (s2
, POINTER_TYPE
))
11820 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11822 /* If both arguments are constants, evaluate at compile-time. */
11825 const size_t r
= strspn (p1
, p2
);
11826 return build_int_cst (size_type_node
, r
);
11829 /* If either argument is "", return NULL_TREE. */
11830 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11831 /* Evaluate and ignore both arguments in case either one has
11833 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
11839 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11842 Return NULL_TREE if no simplification was possible, otherwise return the
11843 simplified form of the call as a tree.
11845 The simplified form may be a constant or other expression which
11846 computes the same value, but in a more efficient manner (including
11847 calls to other builtin functions).
11849 The call may contain arguments which need to be evaluated, but
11850 which are not useful to determine the result of the call. In
11851 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11852 COMPOUND_EXPR will be an argument which must be evaluated.
11853 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11854 COMPOUND_EXPR in the chain will contain the tree for the simplified
11855 form of the builtin function call. */
11858 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
11860 if (!validate_arg (s1
, POINTER_TYPE
)
11861 || !validate_arg (s2
, POINTER_TYPE
))
11865 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11867 /* If both arguments are constants, evaluate at compile-time. */
11870 const size_t r
= strcspn (p1
, p2
);
11871 return build_int_cst (size_type_node
, r
);
11874 /* If the first argument is "", return NULL_TREE. */
11875 if (p1
&& *p1
== '\0')
11877 /* Evaluate and ignore argument s2 in case it has
11879 return omit_one_operand_loc (loc
, size_type_node
,
11880 size_zero_node
, s2
);
11883 /* If the second argument is "", return __builtin_strlen(s1). */
11884 if (p2
&& *p2
== '\0')
11886 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
11888 /* If the replacement _DECL isn't initialized, don't do the
11893 return build_call_expr_loc (loc
, fn
, 1, s1
);
11899 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11900 to the call. IGNORE is true if the value returned
11901 by the builtin will be ignored. UNLOCKED is true is true if this
11902 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11903 the known length of the string. Return NULL_TREE if no simplification
11907 fold_builtin_fputs (location_t loc
, tree arg0
, tree arg1
,
11908 bool ignore
, bool unlocked
, tree len
)
11910 /* If we're using an unlocked function, assume the other unlocked
11911 functions exist explicitly. */
11912 tree
const fn_fputc
= (unlocked
11913 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
11914 : builtin_decl_implicit (BUILT_IN_FPUTC
));
11915 tree
const fn_fwrite
= (unlocked
11916 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
11917 : builtin_decl_implicit (BUILT_IN_FWRITE
));
11919 /* If the return value is used, don't do the transformation. */
11923 /* Verify the arguments in the original call. */
11924 if (!validate_arg (arg0
, POINTER_TYPE
)
11925 || !validate_arg (arg1
, POINTER_TYPE
))
11929 len
= c_strlen (arg0
, 0);
11931 /* Get the length of the string passed to fputs. If the length
11932 can't be determined, punt. */
11934 || TREE_CODE (len
) != INTEGER_CST
)
11937 switch (compare_tree_int (len
, 1))
11939 case -1: /* length is 0, delete the call entirely . */
11940 return omit_one_operand_loc (loc
, integer_type_node
,
11941 integer_zero_node
, arg1
);;
11943 case 0: /* length is 1, call fputc. */
11945 const char *p
= c_getstr (arg0
);
11950 return build_call_expr_loc (loc
, fn_fputc
, 2,
11952 (integer_type_node
, p
[0]), arg1
);
11958 case 1: /* length is greater than 1, call fwrite. */
11960 /* If optimizing for size keep fputs. */
11961 if (optimize_function_for_size_p (cfun
))
11963 /* New argument list transforming fputs(string, stream) to
11964 fwrite(string, 1, len, stream). */
11966 return build_call_expr_loc (loc
, fn_fwrite
, 4, arg0
,
11967 size_one_node
, len
, arg1
);
11972 gcc_unreachable ();
11977 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11978 produced. False otherwise. This is done so that we don't output the error
11979 or warning twice or three times. */
11982 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11984 tree fntype
= TREE_TYPE (current_function_decl
);
11985 int nargs
= call_expr_nargs (exp
);
11987 /* There is good chance the current input_location points inside the
11988 definition of the va_start macro (perhaps on the token for
11989 builtin) in a system header, so warnings will not be emitted.
11990 Use the location in real source code. */
11991 source_location current_location
=
11992 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
11995 if (!stdarg_p (fntype
))
11997 error ("%<va_start%> used in function with fixed args");
12003 if (va_start_p
&& (nargs
!= 2))
12005 error ("wrong number of arguments to function %<va_start%>");
12008 arg
= CALL_EXPR_ARG (exp
, 1);
12010 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12011 when we checked the arguments and if needed issued a warning. */
12016 /* Evidently an out of date version of <stdarg.h>; can't validate
12017 va_start's second argument, but can still work as intended. */
12018 warning_at (current_location
,
12020 "%<__builtin_next_arg%> called without an argument");
12023 else if (nargs
> 1)
12025 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12028 arg
= CALL_EXPR_ARG (exp
, 0);
12031 if (TREE_CODE (arg
) == SSA_NAME
)
12032 arg
= SSA_NAME_VAR (arg
);
12034 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12035 or __builtin_next_arg (0) the first time we see it, after checking
12036 the arguments and if needed issuing a warning. */
12037 if (!integer_zerop (arg
))
12039 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
12041 /* Strip off all nops for the sake of the comparison. This
12042 is not quite the same as STRIP_NOPS. It does more.
12043 We must also strip off INDIRECT_EXPR for C++ reference
12045 while (CONVERT_EXPR_P (arg
)
12046 || TREE_CODE (arg
) == INDIRECT_REF
)
12047 arg
= TREE_OPERAND (arg
, 0);
12048 if (arg
!= last_parm
)
12050 /* FIXME: Sometimes with the tree optimizers we can get the
12051 not the last argument even though the user used the last
12052 argument. We just warn and set the arg to be the last
12053 argument so that we will get wrong-code because of
12055 warning_at (current_location
,
12057 "second parameter of %<va_start%> not last named argument");
12060 /* Undefined by C99 7.15.1.4p4 (va_start):
12061 "If the parameter parmN is declared with the register storage
12062 class, with a function or array type, or with a type that is
12063 not compatible with the type that results after application of
12064 the default argument promotions, the behavior is undefined."
12066 else if (DECL_REGISTER (arg
))
12068 warning_at (current_location
,
12070 "undefined behaviour when second parameter of "
12071 "%<va_start%> is declared with %<register%> storage");
12074 /* We want to verify the second parameter just once before the tree
12075 optimizers are run and then avoid keeping it in the tree,
12076 as otherwise we could warn even for correct code like:
12077 void foo (int i, ...)
12078 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12080 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
12082 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
12088 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12089 ORIG may be null if this is a 2-argument call. We don't attempt to
12090 simplify calls with more than 3 arguments.
12092 Return NULL_TREE if no simplification was possible, otherwise return the
12093 simplified form of the call as a tree. If IGNORED is true, it means that
12094 the caller does not use the returned value of the function. */
12097 fold_builtin_sprintf (location_t loc
, tree dest
, tree fmt
,
12098 tree orig
, int ignored
)
12101 const char *fmt_str
= NULL
;
12103 /* Verify the required arguments in the original call. We deal with two
12104 types of sprintf() calls: 'sprintf (str, fmt)' and
12105 'sprintf (dest, "%s", orig)'. */
12106 if (!validate_arg (dest
, POINTER_TYPE
)
12107 || !validate_arg (fmt
, POINTER_TYPE
))
12109 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12112 /* Check whether the format is a literal string constant. */
12113 fmt_str
= c_getstr (fmt
);
12114 if (fmt_str
== NULL
)
12118 retval
= NULL_TREE
;
12120 if (!init_target_chars ())
12123 /* If the format doesn't contain % args or %%, use strcpy. */
12124 if (strchr (fmt_str
, target_percent
) == NULL
)
12126 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12131 /* Don't optimize sprintf (buf, "abc", ptr++). */
12135 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12136 'format' is known to contain no % formats. */
12137 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12139 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12142 /* If the format is "%s", use strcpy if the result isn't used. */
12143 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12146 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12151 /* Don't crash on sprintf (str1, "%s"). */
12155 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12158 retval
= c_strlen (orig
, 1);
12159 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
12162 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12165 if (call
&& retval
)
12167 retval
= fold_convert_loc
12168 (loc
, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF
))),
12170 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12176 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12177 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12178 attempt to simplify calls with more than 4 arguments.
12180 Return NULL_TREE if no simplification was possible, otherwise return the
12181 simplified form of the call as a tree. If IGNORED is true, it means that
12182 the caller does not use the returned value of the function. */
12185 fold_builtin_snprintf (location_t loc
, tree dest
, tree destsize
, tree fmt
,
12186 tree orig
, int ignored
)
12189 const char *fmt_str
= NULL
;
12190 unsigned HOST_WIDE_INT destlen
;
12192 /* Verify the required arguments in the original call. We deal with two
12193 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12194 'snprintf (dest, cst, "%s", orig)'. */
12195 if (!validate_arg (dest
, POINTER_TYPE
)
12196 || !validate_arg (destsize
, INTEGER_TYPE
)
12197 || !validate_arg (fmt
, POINTER_TYPE
))
12199 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
12202 if (!host_integerp (destsize
, 1))
12205 /* Check whether the format is a literal string constant. */
12206 fmt_str
= c_getstr (fmt
);
12207 if (fmt_str
== NULL
)
12211 retval
= NULL_TREE
;
12213 if (!init_target_chars ())
12216 destlen
= tree_low_cst (destsize
, 1);
12218 /* If the format doesn't contain % args or %%, use strcpy. */
12219 if (strchr (fmt_str
, target_percent
) == NULL
)
12221 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12222 size_t len
= strlen (fmt_str
);
12224 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12228 /* We could expand this as
12229 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12231 memcpy (str, fmt_with_nul_at_cstm1, cst);
12232 but in the former case that might increase code size
12233 and in the latter case grow .rodata section too much.
12234 So punt for now. */
12235 if (len
>= destlen
)
12241 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12242 'format' is known to contain no % formats and
12243 strlen (fmt) < cst. */
12244 call
= build_call_expr_loc (loc
, fn
, 2, dest
, fmt
);
12247 retval
= build_int_cst (integer_type_node
, strlen (fmt_str
));
12250 /* If the format is "%s", use strcpy if the result isn't used. */
12251 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
12253 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
12254 unsigned HOST_WIDE_INT origlen
;
12256 /* Don't crash on snprintf (str1, cst, "%s"). */
12260 retval
= c_strlen (orig
, 1);
12261 if (!retval
|| !host_integerp (retval
, 1))
12264 origlen
= tree_low_cst (retval
, 1);
12265 /* We could expand this as
12266 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12268 memcpy (str1, str2_with_nul_at_cstm1, cst);
12269 but in the former case that might increase code size
12270 and in the latter case grow .rodata section too much.
12271 So punt for now. */
12272 if (origlen
>= destlen
)
12275 /* Convert snprintf (str1, cst, "%s", str2) into
12276 strcpy (str1, str2) if strlen (str2) < cst. */
12280 call
= build_call_expr_loc (loc
, fn
, 2, dest
, orig
);
12283 retval
= NULL_TREE
;
12286 if (call
&& retval
)
12288 tree fn
= builtin_decl_explicit (BUILT_IN_SNPRINTF
);
12289 retval
= fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fn
)), retval
);
12290 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
12296 /* Expand a call EXP to __builtin_object_size. */
12299 expand_builtin_object_size (tree exp
)
12302 int object_size_type
;
12303 tree fndecl
= get_callee_fndecl (exp
);
12305 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12307 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12309 expand_builtin_trap ();
12313 ost
= CALL_EXPR_ARG (exp
, 1);
12316 if (TREE_CODE (ost
) != INTEGER_CST
12317 || tree_int_cst_sgn (ost
) < 0
12318 || compare_tree_int (ost
, 3) > 0)
12320 error ("%Klast argument of %D is not integer constant between 0 and 3",
12322 expand_builtin_trap ();
12326 object_size_type
= tree_low_cst (ost
, 0);
12328 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
12331 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12332 FCODE is the BUILT_IN_* to use.
12333 Return NULL_RTX if we failed; the caller should emit a normal call,
12334 otherwise try to get the result in TARGET, if convenient (and in
12335 mode MODE if that's convenient). */
12338 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
12339 enum built_in_function fcode
)
12341 tree dest
, src
, len
, size
;
12343 if (!validate_arglist (exp
,
12345 fcode
== BUILT_IN_MEMSET_CHK
12346 ? INTEGER_TYPE
: POINTER_TYPE
,
12347 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
12350 dest
= CALL_EXPR_ARG (exp
, 0);
12351 src
= CALL_EXPR_ARG (exp
, 1);
12352 len
= CALL_EXPR_ARG (exp
, 2);
12353 size
= CALL_EXPR_ARG (exp
, 3);
12355 if (! host_integerp (size
, 1))
12358 if (host_integerp (len
, 1) || integer_all_onesp (size
))
12362 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
12364 warning_at (tree_nonartificial_location (exp
),
12365 0, "%Kcall to %D will always overflow destination buffer",
12366 exp
, get_callee_fndecl (exp
));
12371 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12372 mem{cpy,pcpy,move,set} is available. */
12375 case BUILT_IN_MEMCPY_CHK
:
12376 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12378 case BUILT_IN_MEMPCPY_CHK
:
12379 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12381 case BUILT_IN_MEMMOVE_CHK
:
12382 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12384 case BUILT_IN_MEMSET_CHK
:
12385 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12394 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
12395 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12396 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12397 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12399 else if (fcode
== BUILT_IN_MEMSET_CHK
)
12403 unsigned int dest_align
= get_pointer_alignment (dest
);
12405 /* If DEST is not a pointer type, call the normal function. */
12406 if (dest_align
== 0)
12409 /* If SRC and DEST are the same (and not volatile), do nothing. */
12410 if (operand_equal_p (src
, dest
, 0))
12414 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12416 /* Evaluate and ignore LEN in case it has side-effects. */
12417 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
12418 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
12421 expr
= fold_build_pointer_plus (dest
, len
);
12422 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
12425 /* __memmove_chk special case. */
12426 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
12428 unsigned int src_align
= get_pointer_alignment (src
);
12430 if (src_align
== 0)
12433 /* If src is categorized for a readonly section we can use
12434 normal __memcpy_chk. */
12435 if (readonly_data_expr (src
))
12437 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12440 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
12441 dest
, src
, len
, size
);
12442 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
12443 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
12444 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
12451 /* Emit warning if a buffer overflow is detected at compile time. */
12454 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
12458 location_t loc
= tree_nonartificial_location (exp
);
12462 case BUILT_IN_STRCPY_CHK
:
12463 case BUILT_IN_STPCPY_CHK
:
12464 /* For __strcat_chk the warning will be emitted only if overflowing
12465 by at least strlen (dest) + 1 bytes. */
12466 case BUILT_IN_STRCAT_CHK
:
12467 len
= CALL_EXPR_ARG (exp
, 1);
12468 size
= CALL_EXPR_ARG (exp
, 2);
12471 case BUILT_IN_STRNCAT_CHK
:
12472 case BUILT_IN_STRNCPY_CHK
:
12473 case BUILT_IN_STPNCPY_CHK
:
12474 len
= CALL_EXPR_ARG (exp
, 2);
12475 size
= CALL_EXPR_ARG (exp
, 3);
12477 case BUILT_IN_SNPRINTF_CHK
:
12478 case BUILT_IN_VSNPRINTF_CHK
:
12479 len
= CALL_EXPR_ARG (exp
, 1);
12480 size
= CALL_EXPR_ARG (exp
, 3);
12483 gcc_unreachable ();
12489 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12494 len
= c_strlen (len
, 1);
12495 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12498 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
12500 tree src
= CALL_EXPR_ARG (exp
, 1);
12501 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
12503 src
= c_strlen (src
, 1);
12504 if (! src
|| ! host_integerp (src
, 1))
12506 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
12507 exp
, get_callee_fndecl (exp
));
12510 else if (tree_int_cst_lt (src
, size
))
12513 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
12516 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
12517 exp
, get_callee_fndecl (exp
));
12520 /* Emit warning if a buffer overflow is detected at compile time
12521 in __sprintf_chk/__vsprintf_chk calls. */
12524 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
12526 tree size
, len
, fmt
;
12527 const char *fmt_str
;
12528 int nargs
= call_expr_nargs (exp
);
12530 /* Verify the required arguments in the original call. */
12534 size
= CALL_EXPR_ARG (exp
, 2);
12535 fmt
= CALL_EXPR_ARG (exp
, 3);
12537 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
12540 /* Check whether the format is a literal string constant. */
12541 fmt_str
= c_getstr (fmt
);
12542 if (fmt_str
== NULL
)
12545 if (!init_target_chars ())
12548 /* If the format doesn't contain % args or %%, we know its size. */
12549 if (strchr (fmt_str
, target_percent
) == 0)
12550 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12551 /* If the format is "%s" and first ... argument is a string literal,
12553 else if (fcode
== BUILT_IN_SPRINTF_CHK
12554 && strcmp (fmt_str
, target_percent_s
) == 0)
12560 arg
= CALL_EXPR_ARG (exp
, 4);
12561 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
12564 len
= c_strlen (arg
, 1);
12565 if (!len
|| ! host_integerp (len
, 1))
12571 if (! tree_int_cst_lt (len
, size
))
12572 warning_at (tree_nonartificial_location (exp
),
12573 0, "%Kcall to %D will always overflow destination buffer",
12574 exp
, get_callee_fndecl (exp
));
12577 /* Emit warning if a free is called with address of a variable. */
12580 maybe_emit_free_warning (tree exp
)
12582 tree arg
= CALL_EXPR_ARG (exp
, 0);
12585 if (TREE_CODE (arg
) != ADDR_EXPR
)
12588 arg
= get_base_address (TREE_OPERAND (arg
, 0));
12589 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
12592 if (SSA_VAR_P (arg
))
12593 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12594 "%Kattempt to free a non-heap object %qD", exp
, arg
);
12596 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
12597 "%Kattempt to free a non-heap object", exp
);
12600 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12604 fold_builtin_object_size (tree ptr
, tree ost
)
12606 unsigned HOST_WIDE_INT bytes
;
12607 int object_size_type
;
12609 if (!validate_arg (ptr
, POINTER_TYPE
)
12610 || !validate_arg (ost
, INTEGER_TYPE
))
12615 if (TREE_CODE (ost
) != INTEGER_CST
12616 || tree_int_cst_sgn (ost
) < 0
12617 || compare_tree_int (ost
, 3) > 0)
12620 object_size_type
= tree_low_cst (ost
, 0);
12622 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12623 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12624 and (size_t) 0 for types 2 and 3. */
12625 if (TREE_SIDE_EFFECTS (ptr
))
12626 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
12628 if (TREE_CODE (ptr
) == ADDR_EXPR
)
12630 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12631 if (double_int_fits_to_tree_p (size_type_node
,
12632 double_int::from_uhwi (bytes
)))
12633 return build_int_cstu (size_type_node
, bytes
);
12635 else if (TREE_CODE (ptr
) == SSA_NAME
)
12637 /* If object size is not known yet, delay folding until
12638 later. Maybe subsequent passes will help determining
12640 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
12641 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
12642 && double_int_fits_to_tree_p (size_type_node
,
12643 double_int::from_uhwi (bytes
)))
12644 return build_int_cstu (size_type_node
, bytes
);
12650 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12651 DEST, SRC, LEN, and SIZE are the arguments to the call.
12652 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12653 code of the builtin. If MAXLEN is not NULL, it is maximum length
12654 passed as third argument. */
12657 fold_builtin_memory_chk (location_t loc
, tree fndecl
,
12658 tree dest
, tree src
, tree len
, tree size
,
12659 tree maxlen
, bool ignore
,
12660 enum built_in_function fcode
)
12664 if (!validate_arg (dest
, POINTER_TYPE
)
12665 || !validate_arg (src
,
12666 (fcode
== BUILT_IN_MEMSET_CHK
12667 ? INTEGER_TYPE
: POINTER_TYPE
))
12668 || !validate_arg (len
, INTEGER_TYPE
)
12669 || !validate_arg (size
, INTEGER_TYPE
))
12672 /* If SRC and DEST are the same (and not volatile), return DEST
12673 (resp. DEST+LEN for __mempcpy_chk). */
12674 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
12676 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
12677 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12681 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
12682 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
12686 if (! host_integerp (size
, 1))
12689 if (! integer_all_onesp (size
))
12691 if (! host_integerp (len
, 1))
12693 /* If LEN is not constant, try MAXLEN too.
12694 For MAXLEN only allow optimizing into non-_ocs function
12695 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12696 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12698 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
12700 /* (void) __mempcpy_chk () can be optimized into
12701 (void) __memcpy_chk (). */
12702 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12706 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12714 if (tree_int_cst_lt (size
, maxlen
))
12719 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12720 mem{cpy,pcpy,move,set} is available. */
12723 case BUILT_IN_MEMCPY_CHK
:
12724 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
12726 case BUILT_IN_MEMPCPY_CHK
:
12727 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
12729 case BUILT_IN_MEMMOVE_CHK
:
12730 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
12732 case BUILT_IN_MEMSET_CHK
:
12733 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
12742 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12745 /* Fold a call to the __st[rp]cpy_chk builtin.
12746 DEST, SRC, and SIZE are the arguments to the call.
12747 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12748 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12749 strings passed as second argument. */
12752 fold_builtin_stxcpy_chk (location_t loc
, tree fndecl
, tree dest
,
12753 tree src
, tree size
,
12754 tree maxlen
, bool ignore
,
12755 enum built_in_function fcode
)
12759 if (!validate_arg (dest
, POINTER_TYPE
)
12760 || !validate_arg (src
, POINTER_TYPE
)
12761 || !validate_arg (size
, INTEGER_TYPE
))
12764 /* If SRC and DEST are the same (and not volatile), return DEST. */
12765 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12766 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12768 if (! host_integerp (size
, 1))
12771 if (! integer_all_onesp (size
))
12773 len
= c_strlen (src
, 1);
12774 if (! len
|| ! host_integerp (len
, 1))
12776 /* If LEN is not constant, try MAXLEN too.
12777 For MAXLEN only allow optimizing into non-_ocs function
12778 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12779 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12781 if (fcode
== BUILT_IN_STPCPY_CHK
)
12786 /* If return value of __stpcpy_chk is ignored,
12787 optimize into __strcpy_chk. */
12788 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
12792 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12795 if (! len
|| TREE_SIDE_EFFECTS (len
))
12798 /* If c_strlen returned something, but not a constant,
12799 transform __strcpy_chk into __memcpy_chk. */
12800 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
12804 len
= fold_convert_loc (loc
, size_type_node
, len
);
12805 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
12806 build_int_cst (size_type_node
, 1));
12807 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)),
12808 build_call_expr_loc (loc
, fn
, 4,
12809 dest
, src
, len
, size
));
12815 if (! tree_int_cst_lt (maxlen
, size
))
12819 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12820 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
12821 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
12825 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12828 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12829 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12830 length passed as third argument. IGNORE is true if return value can be
12831 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12834 fold_builtin_stxncpy_chk (location_t loc
, tree dest
, tree src
,
12835 tree len
, tree size
, tree maxlen
, bool ignore
,
12836 enum built_in_function fcode
)
12840 if (!validate_arg (dest
, POINTER_TYPE
)
12841 || !validate_arg (src
, POINTER_TYPE
)
12842 || !validate_arg (len
, INTEGER_TYPE
)
12843 || !validate_arg (size
, INTEGER_TYPE
))
12846 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
12848 /* If return value of __stpncpy_chk is ignored,
12849 optimize into __strncpy_chk. */
12850 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
12852 return build_call_expr_loc (loc
, fn
, 4, dest
, src
, len
, size
);
12855 if (! host_integerp (size
, 1))
12858 if (! integer_all_onesp (size
))
12860 if (! host_integerp (len
, 1))
12862 /* If LEN is not constant, try MAXLEN too.
12863 For MAXLEN only allow optimizing into non-_ocs function
12864 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12865 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12871 if (tree_int_cst_lt (size
, maxlen
))
12875 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12876 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
12877 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
12881 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12884 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12885 are the arguments to the call. */
12888 fold_builtin_strcat_chk (location_t loc
, tree fndecl
, tree dest
,
12889 tree src
, tree size
)
12894 if (!validate_arg (dest
, POINTER_TYPE
)
12895 || !validate_arg (src
, POINTER_TYPE
)
12896 || !validate_arg (size
, INTEGER_TYPE
))
12899 p
= c_getstr (src
);
12900 /* If the SRC parameter is "", return DEST. */
12901 if (p
&& *p
== '\0')
12902 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12904 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12907 /* If __builtin_strcat_chk is used, assume strcat is available. */
12908 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
12912 return build_call_expr_loc (loc
, fn
, 2, dest
, src
);
12915 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12919 fold_builtin_strncat_chk (location_t loc
, tree fndecl
,
12920 tree dest
, tree src
, tree len
, tree size
)
12925 if (!validate_arg (dest
, POINTER_TYPE
)
12926 || !validate_arg (src
, POINTER_TYPE
)
12927 || !validate_arg (size
, INTEGER_TYPE
)
12928 || !validate_arg (size
, INTEGER_TYPE
))
12931 p
= c_getstr (src
);
12932 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12933 if (p
&& *p
== '\0')
12934 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12935 else if (integer_zerop (len
))
12936 return omit_one_operand_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12938 if (! host_integerp (size
, 1))
12941 if (! integer_all_onesp (size
))
12943 tree src_len
= c_strlen (src
, 1);
12945 && host_integerp (src_len
, 1)
12946 && host_integerp (len
, 1)
12947 && ! tree_int_cst_lt (len
, src_len
))
12949 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12950 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
12954 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, size
);
12959 /* If __builtin_strncat_chk is used, assume strncat is available. */
12960 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
12964 return build_call_expr_loc (loc
, fn
, 3, dest
, src
, len
);
12967 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12968 Return NULL_TREE if a normal call should be emitted rather than
12969 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12970 or BUILT_IN_VSPRINTF_CHK. */
12973 fold_builtin_sprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
12974 enum built_in_function fcode
)
12976 tree dest
, size
, len
, fn
, fmt
, flag
;
12977 const char *fmt_str
;
12979 /* Verify the required arguments in the original call. */
12983 if (!validate_arg (dest
, POINTER_TYPE
))
12986 if (!validate_arg (flag
, INTEGER_TYPE
))
12989 if (!validate_arg (size
, INTEGER_TYPE
))
12992 if (!validate_arg (fmt
, POINTER_TYPE
))
12995 if (! host_integerp (size
, 1))
13000 if (!init_target_chars ())
13003 /* Check whether the format is a literal string constant. */
13004 fmt_str
= c_getstr (fmt
);
13005 if (fmt_str
!= NULL
)
13007 /* If the format doesn't contain % args or %%, we know the size. */
13008 if (strchr (fmt_str
, target_percent
) == 0)
13010 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
13011 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
13013 /* If the format is "%s" and first ... argument is a string literal,
13014 we know the size too. */
13015 else if (fcode
== BUILT_IN_SPRINTF_CHK
13016 && strcmp (fmt_str
, target_percent_s
) == 0)
13023 if (validate_arg (arg
, POINTER_TYPE
))
13025 len
= c_strlen (arg
, 1);
13026 if (! len
|| ! host_integerp (len
, 1))
13033 if (! integer_all_onesp (size
))
13035 if (! len
|| ! tree_int_cst_lt (len
, size
))
13039 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13040 or if format doesn't contain % chars or is "%s". */
13041 if (! integer_zerop (flag
))
13043 if (fmt_str
== NULL
)
13045 if (strchr (fmt_str
, target_percent
) != NULL
13046 && strcmp (fmt_str
, target_percent_s
))
13050 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13051 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
13052 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
13056 return rewrite_call_expr_array (loc
, nargs
, args
, 4, fn
, 2, dest
, fmt
);
13059 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13060 a normal call should be emitted rather than expanding the function
13061 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13064 fold_builtin_sprintf_chk (location_t loc
, tree exp
,
13065 enum built_in_function fcode
)
13067 return fold_builtin_sprintf_chk_1 (loc
, call_expr_nargs (exp
),
13068 CALL_EXPR_ARGP (exp
), fcode
);
13071 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13072 NULL_TREE if a normal call should be emitted rather than expanding
13073 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13074 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13075 passed as second argument. */
13078 fold_builtin_snprintf_chk_1 (location_t loc
, int nargs
, tree
*args
,
13079 tree maxlen
, enum built_in_function fcode
)
13081 tree dest
, size
, len
, fn
, fmt
, flag
;
13082 const char *fmt_str
;
13084 /* Verify the required arguments in the original call. */
13088 if (!validate_arg (dest
, POINTER_TYPE
))
13091 if (!validate_arg (len
, INTEGER_TYPE
))
13094 if (!validate_arg (flag
, INTEGER_TYPE
))
13097 if (!validate_arg (size
, INTEGER_TYPE
))
13100 if (!validate_arg (fmt
, POINTER_TYPE
))
13103 if (! host_integerp (size
, 1))
13106 if (! integer_all_onesp (size
))
13108 if (! host_integerp (len
, 1))
13110 /* If LEN is not constant, try MAXLEN too.
13111 For MAXLEN only allow optimizing into non-_ocs function
13112 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13113 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
13119 if (tree_int_cst_lt (size
, maxlen
))
13123 if (!init_target_chars ())
13126 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13127 or if format doesn't contain % chars or is "%s". */
13128 if (! integer_zerop (flag
))
13130 fmt_str
= c_getstr (fmt
);
13131 if (fmt_str
== NULL
)
13133 if (strchr (fmt_str
, target_percent
) != NULL
13134 && strcmp (fmt_str
, target_percent_s
))
13138 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13140 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
13141 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
13145 return rewrite_call_expr_array (loc
, nargs
, args
, 5, fn
, 3, dest
, len
, fmt
);
13148 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13149 a normal call should be emitted rather than expanding the function
13150 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13151 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13152 passed as second argument. */
13155 fold_builtin_snprintf_chk (location_t loc
, tree exp
, tree maxlen
,
13156 enum built_in_function fcode
)
13158 return fold_builtin_snprintf_chk_1 (loc
, call_expr_nargs (exp
),
13159 CALL_EXPR_ARGP (exp
), maxlen
, fcode
);
13162 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13163 FMT and ARG are the arguments to the call; we don't fold cases with
13164 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13166 Return NULL_TREE if no simplification was possible, otherwise return the
13167 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13168 code of the function to be simplified. */
13171 fold_builtin_printf (location_t loc
, tree fndecl
, tree fmt
,
13172 tree arg
, bool ignore
,
13173 enum built_in_function fcode
)
13175 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
13176 const char *fmt_str
= NULL
;
13178 /* If the return value is used, don't do the transformation. */
13182 /* Verify the required arguments in the original call. */
13183 if (!validate_arg (fmt
, POINTER_TYPE
))
13186 /* Check whether the format is a literal string constant. */
13187 fmt_str
= c_getstr (fmt
);
13188 if (fmt_str
== NULL
)
13191 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
13193 /* If we're using an unlocked function, assume the other
13194 unlocked functions exist explicitly. */
13195 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
13196 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
13200 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
13201 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
13204 if (!init_target_chars ())
13207 if (strcmp (fmt_str
, target_percent_s
) == 0
13208 || strchr (fmt_str
, target_percent
) == NULL
)
13212 if (strcmp (fmt_str
, target_percent_s
) == 0)
13214 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13217 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13220 str
= c_getstr (arg
);
13226 /* The format specifier doesn't contain any '%' characters. */
13227 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
13233 /* If the string was "", printf does nothing. */
13234 if (str
[0] == '\0')
13235 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13237 /* If the string has length of 1, call putchar. */
13238 if (str
[1] == '\0')
13240 /* Given printf("c"), (where c is any one character,)
13241 convert "c"[0] to an int and pass that to the replacement
13243 newarg
= build_int_cst (integer_type_node
, str
[0]);
13245 call
= build_call_expr_loc (loc
, fn_putchar
, 1, newarg
);
13249 /* If the string was "string\n", call puts("string"). */
13250 size_t len
= strlen (str
);
13251 if ((unsigned char)str
[len
- 1] == target_newline
13252 && (size_t) (int) len
== len
13256 tree offset_node
, string_cst
;
13258 /* Create a NUL-terminated string that's one char shorter
13259 than the original, stripping off the trailing '\n'. */
13260 newarg
= build_string_literal (len
, str
);
13261 string_cst
= string_constant (newarg
, &offset_node
);
13262 gcc_checking_assert (string_cst
13263 && (TREE_STRING_LENGTH (string_cst
)
13265 && integer_zerop (offset_node
)
13267 TREE_STRING_POINTER (string_cst
)[len
- 1]
13268 == target_newline
);
13269 /* build_string_literal creates a new STRING_CST,
13270 modify it in place to avoid double copying. */
13271 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
13272 newstr
[len
- 1] = '\0';
13274 call
= build_call_expr_loc (loc
, fn_puts
, 1, newarg
);
13277 /* We'd like to arrange to call fputs(string,stdout) here,
13278 but we need stdout and don't have a way to get it yet. */
13283 /* The other optimizations can be done only on the non-va_list variants. */
13284 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
13287 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13288 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
13290 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13293 call
= build_call_expr_loc (loc
, fn_puts
, 1, arg
);
13296 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13297 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13299 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13302 call
= build_call_expr_loc (loc
, fn_putchar
, 1, arg
);
13308 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13311 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13312 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13313 more than 3 arguments, and ARG may be null in the 2-argument case.
13315 Return NULL_TREE if no simplification was possible, otherwise return the
13316 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13317 code of the function to be simplified. */
13320 fold_builtin_fprintf (location_t loc
, tree fndecl
, tree fp
,
13321 tree fmt
, tree arg
, bool ignore
,
13322 enum built_in_function fcode
)
13324 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
13325 const char *fmt_str
= NULL
;
13327 /* If the return value is used, don't do the transformation. */
13331 /* Verify the required arguments in the original call. */
13332 if (!validate_arg (fp
, POINTER_TYPE
))
13334 if (!validate_arg (fmt
, POINTER_TYPE
))
13337 /* Check whether the format is a literal string constant. */
13338 fmt_str
= c_getstr (fmt
);
13339 if (fmt_str
== NULL
)
13342 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
13344 /* If we're using an unlocked function, assume the other
13345 unlocked functions exist explicitly. */
13346 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
13347 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
13351 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
13352 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
13355 if (!init_target_chars ())
13358 /* If the format doesn't contain % args or %%, use strcpy. */
13359 if (strchr (fmt_str
, target_percent
) == NULL
)
13361 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
13365 /* If the format specifier was "", fprintf does nothing. */
13366 if (fmt_str
[0] == '\0')
13368 /* If FP has side-effects, just wait until gimplification is
13370 if (TREE_SIDE_EFFECTS (fp
))
13373 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
13376 /* When "string" doesn't contain %, replace all cases of
13377 fprintf (fp, string) with fputs (string, fp). The fputs
13378 builtin will take care of special cases like length == 1. */
13380 call
= build_call_expr_loc (loc
, fn_fputs
, 2, fmt
, fp
);
13383 /* The other optimizations can be done only on the non-va_list variants. */
13384 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
13387 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13388 else if (strcmp (fmt_str
, target_percent_s
) == 0)
13390 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
13393 call
= build_call_expr_loc (loc
, fn_fputs
, 2, arg
, fp
);
13396 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13397 else if (strcmp (fmt_str
, target_percent_c
) == 0)
13399 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
13402 call
= build_call_expr_loc (loc
, fn_fputc
, 2, arg
, fp
);
13407 return fold_convert_loc (loc
, TREE_TYPE (TREE_TYPE (fndecl
)), call
);
13410 /* Initialize format string characters in the target charset. */
13413 init_target_chars (void)
13418 target_newline
= lang_hooks
.to_target_charset ('\n');
13419 target_percent
= lang_hooks
.to_target_charset ('%');
13420 target_c
= lang_hooks
.to_target_charset ('c');
13421 target_s
= lang_hooks
.to_target_charset ('s');
13422 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
13426 target_percent_c
[0] = target_percent
;
13427 target_percent_c
[1] = target_c
;
13428 target_percent_c
[2] = '\0';
13430 target_percent_s
[0] = target_percent
;
13431 target_percent_s
[1] = target_s
;
13432 target_percent_s
[2] = '\0';
13434 target_percent_s_newline
[0] = target_percent
;
13435 target_percent_s_newline
[1] = target_s
;
13436 target_percent_s_newline
[2] = target_newline
;
13437 target_percent_s_newline
[3] = '\0';
13444 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13445 and no overflow/underflow occurred. INEXACT is true if M was not
13446 exactly calculated. TYPE is the tree type for the result. This
13447 function assumes that you cleared the MPFR flags and then
13448 calculated M to see if anything subsequently set a flag prior to
13449 entering this function. Return NULL_TREE if any checks fail. */
13452 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
13454 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13455 overflow/underflow occurred. If -frounding-math, proceed iff the
13456 result of calling FUNC was exact. */
13457 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13458 && (!flag_rounding_math
|| !inexact
))
13460 REAL_VALUE_TYPE rr
;
13462 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
13463 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13464 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13465 but the mpft_t is not, then we underflowed in the
13467 if (real_isfinite (&rr
)
13468 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
13470 REAL_VALUE_TYPE rmode
;
13472 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
13473 /* Proceed iff the specified mode can hold the value. */
13474 if (real_identical (&rmode
, &rr
))
13475 return build_real (type
, rmode
);
13481 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13482 number and no overflow/underflow occurred. INEXACT is true if M
13483 was not exactly calculated. TYPE is the tree type for the result.
13484 This function assumes that you cleared the MPFR flags and then
13485 calculated M to see if anything subsequently set a flag prior to
13486 entering this function. Return NULL_TREE if any checks fail, if
13487 FORCE_CONVERT is true, then bypass the checks. */
13490 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
13492 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13493 overflow/underflow occurred. If -frounding-math, proceed iff the
13494 result of calling FUNC was exact. */
13496 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
13497 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13498 && (!flag_rounding_math
|| !inexact
)))
13500 REAL_VALUE_TYPE re
, im
;
13502 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
13503 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
13504 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13505 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13506 but the mpft_t is not, then we underflowed in the
13509 || (real_isfinite (&re
) && real_isfinite (&im
)
13510 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
13511 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
13513 REAL_VALUE_TYPE re_mode
, im_mode
;
13515 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
13516 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
13517 /* Proceed iff the specified mode can hold the value. */
13519 || (real_identical (&re_mode
, &re
)
13520 && real_identical (&im_mode
, &im
)))
13521 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
13522 build_real (TREE_TYPE (type
), im_mode
));
13528 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13529 FUNC on it and return the resulting value as a tree with type TYPE.
13530 If MIN and/or MAX are not NULL, then the supplied ARG must be
13531 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13532 acceptable values, otherwise they are not. The mpfr precision is
13533 set to the precision of TYPE. We assume that function FUNC returns
13534 zero if the result could be calculated exactly within the requested
13538 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
13539 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
13542 tree result
= NULL_TREE
;
13546 /* To proceed, MPFR must exactly represent the target floating point
13547 format, which only happens when the target base equals two. */
13548 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13549 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
13551 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13553 if (real_isfinite (ra
)
13554 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
13555 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
13557 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13558 const int prec
= fmt
->p
;
13559 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13563 mpfr_init2 (m
, prec
);
13564 mpfr_from_real (m
, ra
, GMP_RNDN
);
13565 mpfr_clear_flags ();
13566 inexact
= func (m
, m
, rnd
);
13567 result
= do_mpfr_ckconv (m
, type
, inexact
);
13575 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13576 FUNC on it and return the resulting value as a tree with type TYPE.
13577 The mpfr precision is set to the precision of TYPE. We assume that
13578 function FUNC returns zero if the result could be calculated
13579 exactly within the requested precision. */
13582 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
13583 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13585 tree result
= NULL_TREE
;
13590 /* To proceed, MPFR must exactly represent the target floating point
13591 format, which only happens when the target base equals two. */
13592 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13593 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13594 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13596 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13597 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13599 if (real_isfinite (ra1
) && real_isfinite (ra2
))
13601 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13602 const int prec
= fmt
->p
;
13603 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13607 mpfr_inits2 (prec
, m1
, m2
, NULL
);
13608 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13609 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13610 mpfr_clear_flags ();
13611 inexact
= func (m1
, m1
, m2
, rnd
);
13612 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13613 mpfr_clears (m1
, m2
, NULL
);
13620 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13621 FUNC on it and return the resulting value as a tree with type TYPE.
13622 The mpfr precision is set to the precision of TYPE. We assume that
13623 function FUNC returns zero if the result could be calculated
13624 exactly within the requested precision. */
13627 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
13628 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
13630 tree result
= NULL_TREE
;
13636 /* To proceed, MPFR must exactly represent the target floating point
13637 format, which only happens when the target base equals two. */
13638 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13639 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
13640 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
13641 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
13643 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
13644 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
13645 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
13647 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
13649 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13650 const int prec
= fmt
->p
;
13651 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13655 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
13656 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13657 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
13658 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
13659 mpfr_clear_flags ();
13660 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
13661 result
= do_mpfr_ckconv (m1
, type
, inexact
);
13662 mpfr_clears (m1
, m2
, m3
, NULL
);
13669 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13670 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13671 If ARG_SINP and ARG_COSP are NULL then the result is returned
13672 as a complex value.
13673 The type is taken from the type of ARG and is used for setting the
13674 precision of the calculation and results. */
13677 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
13679 tree
const type
= TREE_TYPE (arg
);
13680 tree result
= NULL_TREE
;
13684 /* To proceed, MPFR must exactly represent the target floating point
13685 format, which only happens when the target base equals two. */
13686 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13687 && TREE_CODE (arg
) == REAL_CST
13688 && !TREE_OVERFLOW (arg
))
13690 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
13692 if (real_isfinite (ra
))
13694 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13695 const int prec
= fmt
->p
;
13696 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13697 tree result_s
, result_c
;
13701 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
13702 mpfr_from_real (m
, ra
, GMP_RNDN
);
13703 mpfr_clear_flags ();
13704 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
13705 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
13706 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
13707 mpfr_clears (m
, ms
, mc
, NULL
);
13708 if (result_s
&& result_c
)
13710 /* If we are to return in a complex value do so. */
13711 if (!arg_sinp
&& !arg_cosp
)
13712 return build_complex (build_complex_type (type
),
13713 result_c
, result_s
);
13715 /* Dereference the sin/cos pointer arguments. */
13716 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
13717 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
13718 /* Proceed if valid pointer type were passed in. */
13719 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
13720 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
13722 /* Set the values. */
13723 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
13725 TREE_SIDE_EFFECTS (result_s
) = 1;
13726 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
13728 TREE_SIDE_EFFECTS (result_c
) = 1;
13729 /* Combine the assignments into a compound expr. */
13730 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13731 result_s
, result_c
));
13739 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13740 two-argument mpfr order N Bessel function FUNC on them and return
13741 the resulting value as a tree with type TYPE. The mpfr precision
13742 is set to the precision of TYPE. We assume that function FUNC
13743 returns zero if the result could be calculated exactly within the
13744 requested precision. */
13746 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
13747 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
13748 const REAL_VALUE_TYPE
*min
, bool inclusive
)
13750 tree result
= NULL_TREE
;
13755 /* To proceed, MPFR must exactly represent the target floating point
13756 format, which only happens when the target base equals two. */
13757 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13758 && host_integerp (arg1
, 0)
13759 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
13761 const HOST_WIDE_INT n
= tree_low_cst (arg1
, 0);
13762 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
13765 && real_isfinite (ra
)
13766 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
13768 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13769 const int prec
= fmt
->p
;
13770 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13774 mpfr_init2 (m
, prec
);
13775 mpfr_from_real (m
, ra
, GMP_RNDN
);
13776 mpfr_clear_flags ();
13777 inexact
= func (m
, n
, m
, rnd
);
13778 result
= do_mpfr_ckconv (m
, type
, inexact
);
13786 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13787 the pointer *(ARG_QUO) and return the result. The type is taken
13788 from the type of ARG0 and is used for setting the precision of the
13789 calculation and results. */
13792 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
13794 tree
const type
= TREE_TYPE (arg0
);
13795 tree result
= NULL_TREE
;
13800 /* To proceed, MPFR must exactly represent the target floating point
13801 format, which only happens when the target base equals two. */
13802 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13803 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
13804 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
13806 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
13807 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
13809 if (real_isfinite (ra0
) && real_isfinite (ra1
))
13811 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13812 const int prec
= fmt
->p
;
13813 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13818 mpfr_inits2 (prec
, m0
, m1
, NULL
);
13819 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
13820 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
13821 mpfr_clear_flags ();
13822 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
13823 /* Remquo is independent of the rounding mode, so pass
13824 inexact=0 to do_mpfr_ckconv(). */
13825 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
13826 mpfr_clears (m0
, m1
, NULL
);
13829 /* MPFR calculates quo in the host's long so it may
13830 return more bits in quo than the target int can hold
13831 if sizeof(host long) > sizeof(target int). This can
13832 happen even for native compilers in LP64 mode. In
13833 these cases, modulo the quo value with the largest
13834 number that the target int can hold while leaving one
13835 bit for the sign. */
13836 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
13837 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
13839 /* Dereference the quo pointer argument. */
13840 arg_quo
= build_fold_indirect_ref (arg_quo
);
13841 /* Proceed iff a valid pointer type was passed in. */
13842 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
13844 /* Set the value. */
13846 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
13847 build_int_cst (TREE_TYPE (arg_quo
),
13849 TREE_SIDE_EFFECTS (result_quo
) = 1;
13850 /* Combine the quo assignment with the rem. */
13851 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13852 result_quo
, result_rem
));
13860 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13861 resulting value as a tree with type TYPE. The mpfr precision is
13862 set to the precision of TYPE. We assume that this mpfr function
13863 returns zero if the result could be calculated exactly within the
13864 requested precision. In addition, the integer pointer represented
13865 by ARG_SG will be dereferenced and set to the appropriate signgam
13869 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
13871 tree result
= NULL_TREE
;
13875 /* To proceed, MPFR must exactly represent the target floating point
13876 format, which only happens when the target base equals two. Also
13877 verify ARG is a constant and that ARG_SG is an int pointer. */
13878 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
13879 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
13880 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
13881 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13883 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13885 /* In addition to NaN and Inf, the argument cannot be zero or a
13886 negative integer. */
13887 if (real_isfinite (ra
)
13888 && ra
->cl
!= rvc_zero
13889 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
13891 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
13892 const int prec
= fmt
->p
;
13893 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13898 mpfr_init2 (m
, prec
);
13899 mpfr_from_real (m
, ra
, GMP_RNDN
);
13900 mpfr_clear_flags ();
13901 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
13902 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13908 /* Dereference the arg_sg pointer argument. */
13909 arg_sg
= build_fold_indirect_ref (arg_sg
);
13910 /* Assign the signgam value into *arg_sg. */
13911 result_sg
= fold_build2 (MODIFY_EXPR
,
13912 TREE_TYPE (arg_sg
), arg_sg
,
13913 build_int_cst (TREE_TYPE (arg_sg
), sg
));
13914 TREE_SIDE_EFFECTS (result_sg
) = 1;
13915 /* Combine the signgam assignment with the lgamma result. */
13916 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13917 result_sg
, result_lg
));
13925 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13926 function FUNC on it and return the resulting value as a tree with
13927 type TYPE. The mpfr precision is set to the precision of TYPE. We
13928 assume that function FUNC returns zero if the result could be
13929 calculated exactly within the requested precision. */
13932 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
13934 tree result
= NULL_TREE
;
13938 /* To proceed, MPFR must exactly represent the target floating point
13939 format, which only happens when the target base equals two. */
13940 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
13941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
13942 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
13944 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
13945 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
13947 if (real_isfinite (re
) && real_isfinite (im
))
13949 const struct real_format
*const fmt
=
13950 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
13951 const int prec
= fmt
->p
;
13952 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
13953 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
13957 mpc_init2 (m
, prec
);
13958 mpfr_from_real (mpc_realref (m
), re
, rnd
);
13959 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
13960 mpfr_clear_flags ();
13961 inexact
= func (m
, m
, crnd
);
13962 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
13970 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13971 mpc function FUNC on it and return the resulting value as a tree
13972 with type TYPE. The mpfr precision is set to the precision of
13973 TYPE. We assume that function FUNC returns zero if the result
13974 could be calculated exactly within the requested precision. If
13975 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13976 in the arguments and/or results. */
13979 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
13980 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
13982 tree result
= NULL_TREE
;
13987 /* To proceed, MPFR must exactly represent the target floating point
13988 format, which only happens when the target base equals two. */
13989 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
13990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
13991 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
13992 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
13993 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
13995 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
13996 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
13997 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
13998 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
14001 || (real_isfinite (re0
) && real_isfinite (im0
)
14002 && real_isfinite (re1
) && real_isfinite (im1
)))
14004 const struct real_format
*const fmt
=
14005 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
14006 const int prec
= fmt
->p
;
14007 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
14008 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
14012 mpc_init2 (m0
, prec
);
14013 mpc_init2 (m1
, prec
);
14014 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
14015 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
14016 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
14017 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
14018 mpfr_clear_flags ();
14019 inexact
= func (m0
, m0
, m1
, crnd
);
14020 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
14029 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14030 a normal call should be emitted rather than expanding the function
14031 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14034 gimple_fold_builtin_sprintf_chk (gimple stmt
, enum built_in_function fcode
)
14036 int nargs
= gimple_call_num_args (stmt
);
14038 return fold_builtin_sprintf_chk_1 (gimple_location (stmt
), nargs
,
14040 ? gimple_call_arg_ptr (stmt
, 0)
14041 : &error_mark_node
), fcode
);
14044 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14045 a normal call should be emitted rather than expanding the function
14046 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14047 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14048 passed as second argument. */
14051 gimple_fold_builtin_snprintf_chk (gimple stmt
, tree maxlen
,
14052 enum built_in_function fcode
)
14054 int nargs
= gimple_call_num_args (stmt
);
14056 return fold_builtin_snprintf_chk_1 (gimple_location (stmt
), nargs
,
14058 ? gimple_call_arg_ptr (stmt
, 0)
14059 : &error_mark_node
), maxlen
, fcode
);
14062 /* Builtins with folding operations that operate on "..." arguments
14063 need special handling; we need to store the arguments in a convenient
14064 data structure before attempting any folding. Fortunately there are
14065 only a few builtins that fall into this category. FNDECL is the
14066 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14067 result of the function call is ignored. */
14070 gimple_fold_builtin_varargs (tree fndecl
, gimple stmt
,
14071 bool ignore ATTRIBUTE_UNUSED
)
14073 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
14074 tree ret
= NULL_TREE
;
14078 case BUILT_IN_SPRINTF_CHK
:
14079 case BUILT_IN_VSPRINTF_CHK
:
14080 ret
= gimple_fold_builtin_sprintf_chk (stmt
, fcode
);
14083 case BUILT_IN_SNPRINTF_CHK
:
14084 case BUILT_IN_VSNPRINTF_CHK
:
14085 ret
= gimple_fold_builtin_snprintf_chk (stmt
, NULL_TREE
, fcode
);
14092 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
14093 TREE_NO_WARNING (ret
) = 1;
14099 /* A wrapper function for builtin folding that prevents warnings for
14100 "statement without effect" and the like, caused by removing the
14101 call node earlier than the warning is generated. */
14104 fold_call_stmt (gimple stmt
, bool ignore
)
14106 tree ret
= NULL_TREE
;
14107 tree fndecl
= gimple_call_fndecl (stmt
);
14108 location_t loc
= gimple_location (stmt
);
14110 && TREE_CODE (fndecl
) == FUNCTION_DECL
14111 && DECL_BUILT_IN (fndecl
)
14112 && !gimple_call_va_arg_pack_p (stmt
))
14114 int nargs
= gimple_call_num_args (stmt
);
14115 tree
*args
= (nargs
> 0
14116 ? gimple_call_arg_ptr (stmt
, 0)
14117 : &error_mark_node
);
14119 if (avoid_folding_inline_builtin (fndecl
))
14121 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
14123 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
14127 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
14128 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
14130 ret
= gimple_fold_builtin_varargs (fndecl
, stmt
, ignore
);
14133 /* Propagate location information from original call to
14134 expansion of builtin. Otherwise things like
14135 maybe_emit_chk_warning, that operate on the expansion
14136 of a builtin, will use the wrong location information. */
14137 if (gimple_has_location (stmt
))
14139 tree realret
= ret
;
14140 if (TREE_CODE (ret
) == NOP_EXPR
)
14141 realret
= TREE_OPERAND (ret
, 0);
14142 if (CAN_HAVE_LOCATION_P (realret
)
14143 && !EXPR_HAS_LOCATION (realret
))
14144 SET_EXPR_LOCATION (realret
, loc
);
14154 /* Look up the function in builtin_decl that corresponds to DECL
14155 and set ASMSPEC as its user assembler name. DECL must be a
14156 function decl that declares a builtin. */
14159 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
14162 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
14163 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
14166 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
14167 set_user_assembler_name (builtin
, asmspec
);
14168 switch (DECL_FUNCTION_CODE (decl
))
14170 case BUILT_IN_MEMCPY
:
14171 init_block_move_fn (asmspec
);
14172 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
14174 case BUILT_IN_MEMSET
:
14175 init_block_clear_fn (asmspec
);
14176 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
14178 case BUILT_IN_MEMMOVE
:
14179 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
14181 case BUILT_IN_MEMCMP
:
14182 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
14184 case BUILT_IN_ABORT
:
14185 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
14188 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
14190 set_user_assembler_libfunc ("ffs", asmspec
);
14191 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
14192 MODE_INT
, 0), "ffs");
14200 /* Return true if DECL is a builtin that expands to a constant or similarly
14203 is_simple_builtin (tree decl
)
14205 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14206 switch (DECL_FUNCTION_CODE (decl
))
14208 /* Builtins that expand to constants. */
14209 case BUILT_IN_CONSTANT_P
:
14210 case BUILT_IN_EXPECT
:
14211 case BUILT_IN_OBJECT_SIZE
:
14212 case BUILT_IN_UNREACHABLE
:
14213 /* Simple register moves or loads from stack. */
14214 case BUILT_IN_ASSUME_ALIGNED
:
14215 case BUILT_IN_RETURN_ADDRESS
:
14216 case BUILT_IN_EXTRACT_RETURN_ADDR
:
14217 case BUILT_IN_FROB_RETURN_ADDR
:
14218 case BUILT_IN_RETURN
:
14219 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
14220 case BUILT_IN_FRAME_ADDRESS
:
14221 case BUILT_IN_VA_END
:
14222 case BUILT_IN_STACK_SAVE
:
14223 case BUILT_IN_STACK_RESTORE
:
14224 /* Exception state returns or moves registers around. */
14225 case BUILT_IN_EH_FILTER
:
14226 case BUILT_IN_EH_POINTER
:
14227 case BUILT_IN_EH_COPY_VALUES
:
14237 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14238 most probably expanded inline into reasonably simple code. This is a
14239 superset of is_simple_builtin. */
14241 is_inexpensive_builtin (tree decl
)
14245 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
14247 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
14248 switch (DECL_FUNCTION_CODE (decl
))
14251 case BUILT_IN_ALLOCA
:
14252 case BUILT_IN_ALLOCA_WITH_ALIGN
:
14253 case BUILT_IN_BSWAP16
:
14254 case BUILT_IN_BSWAP32
:
14255 case BUILT_IN_BSWAP64
:
14257 case BUILT_IN_CLZIMAX
:
14258 case BUILT_IN_CLZL
:
14259 case BUILT_IN_CLZLL
:
14261 case BUILT_IN_CTZIMAX
:
14262 case BUILT_IN_CTZL
:
14263 case BUILT_IN_CTZLL
:
14265 case BUILT_IN_FFSIMAX
:
14266 case BUILT_IN_FFSL
:
14267 case BUILT_IN_FFSLL
:
14268 case BUILT_IN_IMAXABS
:
14269 case BUILT_IN_FINITE
:
14270 case BUILT_IN_FINITEF
:
14271 case BUILT_IN_FINITEL
:
14272 case BUILT_IN_FINITED32
:
14273 case BUILT_IN_FINITED64
:
14274 case BUILT_IN_FINITED128
:
14275 case BUILT_IN_FPCLASSIFY
:
14276 case BUILT_IN_ISFINITE
:
14277 case BUILT_IN_ISINF_SIGN
:
14278 case BUILT_IN_ISINF
:
14279 case BUILT_IN_ISINFF
:
14280 case BUILT_IN_ISINFL
:
14281 case BUILT_IN_ISINFD32
:
14282 case BUILT_IN_ISINFD64
:
14283 case BUILT_IN_ISINFD128
:
14284 case BUILT_IN_ISNAN
:
14285 case BUILT_IN_ISNANF
:
14286 case BUILT_IN_ISNANL
:
14287 case BUILT_IN_ISNAND32
:
14288 case BUILT_IN_ISNAND64
:
14289 case BUILT_IN_ISNAND128
:
14290 case BUILT_IN_ISNORMAL
:
14291 case BUILT_IN_ISGREATER
:
14292 case BUILT_IN_ISGREATEREQUAL
:
14293 case BUILT_IN_ISLESS
:
14294 case BUILT_IN_ISLESSEQUAL
:
14295 case BUILT_IN_ISLESSGREATER
:
14296 case BUILT_IN_ISUNORDERED
:
14297 case BUILT_IN_VA_ARG_PACK
:
14298 case BUILT_IN_VA_ARG_PACK_LEN
:
14299 case BUILT_IN_VA_COPY
:
14300 case BUILT_IN_TRAP
:
14301 case BUILT_IN_SAVEREGS
:
14302 case BUILT_IN_POPCOUNTL
:
14303 case BUILT_IN_POPCOUNTLL
:
14304 case BUILT_IN_POPCOUNTIMAX
:
14305 case BUILT_IN_POPCOUNT
:
14306 case BUILT_IN_PARITYL
:
14307 case BUILT_IN_PARITYLL
:
14308 case BUILT_IN_PARITYIMAX
:
14309 case BUILT_IN_PARITY
:
14310 case BUILT_IN_LABS
:
14311 case BUILT_IN_LLABS
:
14312 case BUILT_IN_PREFETCH
:
14316 return is_simple_builtin (decl
);