1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
38 #include "tree-object-size.h"
41 #include "internal-fn.h"
45 #include "insn-config.h"
52 #include "insn-codes.h"
57 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
69 #include "tree-chkp.h"
73 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
75 struct target_builtins default_target_builtins
;
77 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names
[BUILT_IN_LAST
]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names
[(int) END_BUILTINS
] =
87 #include "builtins.def"
91 /* Setup an array of builtin_info_type, make sure each element decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info
[(int)END_BUILTINS
];
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p
;
98 static rtx
c_readstr (const char *, machine_mode
);
99 static int target_char_cast (tree
, char *);
100 static rtx
get_memory_rtx (tree
, tree
);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx
result_vector (int, rtx
);
104 static void expand_builtin_prefetch (tree
);
105 static rtx
expand_builtin_apply_args (void);
106 static rtx
expand_builtin_apply_args_1 (void);
107 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
108 static void expand_builtin_return (rtx
);
109 static enum type_class
type_to_class (tree
);
110 static rtx
expand_builtin_classify_type (tree
);
111 static void expand_errno_check (tree
, rtx
);
112 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
113 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
114 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
115 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
116 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
117 static rtx
expand_builtin_sincos (tree
);
118 static rtx
expand_builtin_cexpi (tree
, rtx
);
119 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
120 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
121 static rtx
expand_builtin_next_arg (void);
122 static rtx
expand_builtin_va_start (tree
);
123 static rtx
expand_builtin_va_end (tree
);
124 static rtx
expand_builtin_va_copy (tree
);
125 static rtx
expand_builtin_strcmp (tree
, rtx
);
126 static rtx
expand_builtin_strncmp (tree
, rtx
, machine_mode
);
127 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, machine_mode
);
128 static rtx
expand_builtin_memcpy (tree
, rtx
);
129 static rtx
expand_builtin_memcpy_with_bounds (tree
, rtx
);
130 static rtx
expand_builtin_memcpy_args (tree
, tree
, tree
, rtx
, tree
);
131 static rtx
expand_builtin_mempcpy (tree
, rtx
, machine_mode
);
132 static rtx
expand_builtin_mempcpy_with_bounds (tree
, rtx
, machine_mode
);
133 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
134 machine_mode
, int, tree
);
135 static rtx
expand_builtin_strcpy (tree
, rtx
);
136 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
137 static rtx
expand_builtin_stpcpy (tree
, rtx
, machine_mode
);
138 static rtx
expand_builtin_strncpy (tree
, rtx
);
139 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, machine_mode
);
140 static rtx
expand_builtin_memset (tree
, rtx
, machine_mode
);
141 static rtx
expand_builtin_memset_with_bounds (tree
, rtx
, machine_mode
);
142 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, machine_mode
, tree
);
143 static rtx
expand_builtin_bzero (tree
);
144 static rtx
expand_builtin_strlen (tree
, rtx
, machine_mode
);
145 static rtx
expand_builtin_alloca (tree
, bool);
146 static rtx
expand_builtin_unop (machine_mode
, tree
, rtx
, rtx
, optab
);
147 static rtx
expand_builtin_frame_address (tree
, tree
);
148 static tree
stabilize_va_list_loc (location_t
, tree
, int);
149 static rtx
expand_builtin_expect (tree
, rtx
);
150 static tree
fold_builtin_constant_p (tree
);
151 static tree
fold_builtin_classify_type (tree
);
152 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
153 static tree
fold_builtin_inf (location_t
, tree
, int);
154 static tree
fold_builtin_nan (tree
, tree
, int);
155 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
156 static bool validate_arg (const_tree
, enum tree_code code
);
157 static bool integer_valued_real_p (tree
);
158 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
159 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
160 static rtx
expand_builtin_signbit (tree
, rtx
);
161 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
162 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
163 static tree
fold_builtin_tan (tree
, tree
);
164 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
165 static tree
fold_builtin_floor (location_t
, tree
, tree
);
166 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
167 static tree
fold_builtin_round (location_t
, tree
, tree
);
168 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
169 static tree
fold_builtin_bitop (tree
, tree
);
170 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
171 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
172 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
173 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
174 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
175 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
176 static tree
fold_builtin_isascii (location_t
, tree
);
177 static tree
fold_builtin_toascii (location_t
, tree
);
178 static tree
fold_builtin_isdigit (location_t
, tree
);
179 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
180 static tree
fold_builtin_abs (location_t
, tree
, tree
);
181 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
183 static tree
fold_builtin_0 (location_t
, tree
);
184 static tree
fold_builtin_1 (location_t
, tree
, tree
);
185 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
);
186 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
);
187 static tree
fold_builtin_varargs (location_t
, tree
, tree
*, int);
189 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
190 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
191 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
192 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
193 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
195 static rtx
expand_builtin_object_size (tree
);
196 static rtx
expand_builtin_memory_chk (tree
, rtx
, machine_mode
,
197 enum built_in_function
);
198 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
199 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
200 static void maybe_emit_free_warning (tree
);
201 static tree
fold_builtin_object_size (tree
, tree
);
203 unsigned HOST_WIDE_INT target_newline
;
204 unsigned HOST_WIDE_INT target_percent
;
205 static unsigned HOST_WIDE_INT target_c
;
206 static unsigned HOST_WIDE_INT target_s
;
207 char target_percent_c
[3];
208 char target_percent_s
[3];
209 char target_percent_s_newline
[4];
210 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
211 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
212 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
213 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
214 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
215 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
216 static tree
do_mpfr_sincos (tree
, tree
, tree
);
217 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
218 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
219 const REAL_VALUE_TYPE
*, bool);
220 static tree
do_mpfr_remquo (tree
, tree
, tree
);
221 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
222 static void expand_builtin_sync_synchronize (void);
224 /* Return true if NAME starts with __builtin_ or __sync_. */
227 is_builtin_name (const char *name
)
229 if (strncmp (name
, "__builtin_", 10) == 0)
231 if (strncmp (name
, "__sync_", 7) == 0)
233 if (strncmp (name
, "__atomic_", 9) == 0)
236 && (!strcmp (name
, "__cilkrts_detach")
237 || !strcmp (name
, "__cilkrts_pop_frame")))
243 /* Return true if DECL is a function symbol representing a built-in. */
246 is_builtin_fn (tree decl
)
248 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
251 /* Return true if NODE should be considered for inline expansion regardless
252 of the optimization level. This means whenever a function is invoked with
253 its "internal" name, which normally contains the prefix "__builtin". */
256 called_as_built_in (tree node
)
258 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
259 we want the name used to call the function, not the name it
261 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
262 return is_builtin_name (name
);
265 /* Compute values M and N such that M divides (address of EXP - N) and such
266 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
267 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
268 *ALIGNP and any bit-offset to *BITPOSP.
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address.
277 If ADDR_P is true we are taking the address of the memory reference EXP
278 and thus cannot rely on the access taking place. */
281 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
282 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
284 HOST_WIDE_INT bitsize
, bitpos
;
287 int unsignedp
, volatilep
;
288 unsigned int align
= BITS_PER_UNIT
;
289 bool known_alignment
= false;
291 /* Get the innermost object and the constant (bitpos) and possibly
292 variable (offset) offset of the access. */
293 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
294 &mode
, &unsignedp
, &volatilep
, true);
296 /* Extract alignment information from the innermost object and
297 possibly adjust bitpos and offset. */
298 if (TREE_CODE (exp
) == FUNCTION_DECL
)
300 /* Function addresses can encode extra information besides their
301 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
302 allows the low bit to be used as a virtual bit, we know
303 that the address itself must be at least 2-byte aligned. */
304 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
305 align
= 2 * BITS_PER_UNIT
;
307 else if (TREE_CODE (exp
) == LABEL_DECL
)
309 else if (TREE_CODE (exp
) == CONST_DECL
)
311 /* The alignment of a CONST_DECL is determined by its initializer. */
312 exp
= DECL_INITIAL (exp
);
313 align
= TYPE_ALIGN (TREE_TYPE (exp
));
314 if (CONSTANT_CLASS_P (exp
))
315 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
317 known_alignment
= true;
319 else if (DECL_P (exp
))
321 align
= DECL_ALIGN (exp
);
322 known_alignment
= true;
324 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
326 align
= TYPE_ALIGN (TREE_TYPE (exp
));
328 else if (TREE_CODE (exp
) == INDIRECT_REF
329 || TREE_CODE (exp
) == MEM_REF
330 || TREE_CODE (exp
) == TARGET_MEM_REF
)
332 tree addr
= TREE_OPERAND (exp
, 0);
334 unsigned HOST_WIDE_INT ptr_bitpos
;
335 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
337 /* If the address is explicitly aligned, handle that. */
338 if (TREE_CODE (addr
) == BIT_AND_EXPR
339 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
341 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
342 ptr_bitmask
*= BITS_PER_UNIT
;
343 align
= ptr_bitmask
& -ptr_bitmask
;
344 addr
= TREE_OPERAND (addr
, 0);
348 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
349 align
= MAX (ptr_align
, align
);
351 /* Re-apply explicit alignment to the bitpos. */
352 ptr_bitpos
&= ptr_bitmask
;
354 /* The alignment of the pointer operand in a TARGET_MEM_REF
355 has to take the variable offset parts into account. */
356 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
360 unsigned HOST_WIDE_INT step
= 1;
362 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
363 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
365 if (TMR_INDEX2 (exp
))
366 align
= BITS_PER_UNIT
;
367 known_alignment
= false;
370 /* When EXP is an actual memory reference then we can use
371 TYPE_ALIGN of a pointer indirection to derive alignment.
372 Do so only if get_pointer_alignment_1 did not reveal absolute
373 alignment knowledge and if using that alignment would
374 improve the situation. */
375 if (!addr_p
&& !known_alignment
376 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
377 align
= TYPE_ALIGN (TREE_TYPE (exp
));
380 /* Else adjust bitpos accordingly. */
381 bitpos
+= ptr_bitpos
;
382 if (TREE_CODE (exp
) == MEM_REF
383 || TREE_CODE (exp
) == TARGET_MEM_REF
)
384 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
387 else if (TREE_CODE (exp
) == STRING_CST
)
389 /* STRING_CST are the only constant objects we allow to be not
390 wrapped inside a CONST_DECL. */
391 align
= TYPE_ALIGN (TREE_TYPE (exp
));
392 if (CONSTANT_CLASS_P (exp
))
393 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
395 known_alignment
= true;
398 /* If there is a non-constant offset part extract the maximum
399 alignment that can prevail. */
402 unsigned int trailing_zeros
= tree_ctz (offset
);
403 if (trailing_zeros
< HOST_BITS_PER_INT
)
405 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
407 align
= MIN (align
, inner
);
412 *bitposp
= bitpos
& (*alignp
- 1);
413 return known_alignment
;
416 /* For a memory reference expression EXP compute values M and N such that M
417 divides (&EXP - N) and such that N < M. If these numbers can be determined,
418 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
419 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
422 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
423 unsigned HOST_WIDE_INT
*bitposp
)
425 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
428 /* Return the alignment in bits of EXP, an object. */
431 get_object_alignment (tree exp
)
433 unsigned HOST_WIDE_INT bitpos
= 0;
436 get_object_alignment_1 (exp
, &align
, &bitpos
);
438 /* align and bitpos now specify known low bits of the pointer.
439 ptr & (align - 1) == bitpos. */
442 align
= (bitpos
& -bitpos
);
446 /* For a pointer valued expression EXP compute values M and N such that M
447 divides (EXP - N) and such that N < M. If these numbers can be determined,
448 store M in alignp and N in *BITPOSP and return true. Return false if
449 the results are just a conservative approximation.
451 If EXP is not a pointer, false is returned too. */
454 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
455 unsigned HOST_WIDE_INT
*bitposp
)
459 if (TREE_CODE (exp
) == ADDR_EXPR
)
460 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
461 alignp
, bitposp
, true);
462 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
465 unsigned HOST_WIDE_INT bitpos
;
466 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
468 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
469 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
472 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
473 if (trailing_zeros
< HOST_BITS_PER_INT
)
475 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
477 align
= MIN (align
, inner
);
481 *bitposp
= bitpos
& (align
- 1);
484 else if (TREE_CODE (exp
) == SSA_NAME
485 && POINTER_TYPE_P (TREE_TYPE (exp
)))
487 unsigned int ptr_align
, ptr_misalign
;
488 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
490 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
492 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
493 *alignp
= ptr_align
* BITS_PER_UNIT
;
494 /* We cannot really tell whether this result is an approximation. */
500 *alignp
= BITS_PER_UNIT
;
504 else if (TREE_CODE (exp
) == INTEGER_CST
)
506 *alignp
= BIGGEST_ALIGNMENT
;
507 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
508 & (BIGGEST_ALIGNMENT
- 1));
513 *alignp
= BITS_PER_UNIT
;
517 /* Return the alignment in bits of EXP, a pointer valued expression.
518 The alignment returned is, by default, the alignment of the thing that
519 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
521 Otherwise, look at the expression to see if we can do better, i.e., if the
522 expression is actually pointing at an object whose alignment is tighter. */
525 get_pointer_alignment (tree exp
)
527 unsigned HOST_WIDE_INT bitpos
= 0;
530 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
532 /* align and bitpos now specify known low bits of the pointer.
533 ptr & (align - 1) == bitpos. */
536 align
= (bitpos
& -bitpos
);
541 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
542 way, because it could contain a zero byte in the middle.
543 TREE_STRING_LENGTH is the size of the character array, not the string.
545 ONLY_VALUE should be nonzero if the result is not going to be emitted
546 into the instruction stream and zero if it is going to be expanded.
547 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
548 is returned, otherwise NULL, since
549 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
550 evaluate the side-effects.
552 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
553 accesses. Note that this implies the result is not going to be emitted
554 into the instruction stream.
556 The value returned is of type `ssizetype'.
558 Unfortunately, string_constant can't access the values of const char
559 arrays with initializers, so neither can we do so here. */
562 c_strlen (tree src
, int only_value
)
565 HOST_WIDE_INT offset
;
571 if (TREE_CODE (src
) == COND_EXPR
572 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
576 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
577 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
578 if (tree_int_cst_equal (len1
, len2
))
582 if (TREE_CODE (src
) == COMPOUND_EXPR
583 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
584 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
586 loc
= EXPR_LOC_OR_LOC (src
, input_location
);
588 src
= string_constant (src
, &offset_node
);
592 max
= TREE_STRING_LENGTH (src
) - 1;
593 ptr
= TREE_STRING_POINTER (src
);
595 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
597 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
598 compute the offset to the following null if we don't know where to
599 start searching for it. */
602 for (i
= 0; i
< max
; i
++)
606 /* We don't know the starting offset, but we do know that the string
607 has no internal zero bytes. We can assume that the offset falls
608 within the bounds of the string; otherwise, the programmer deserves
609 what he gets. Subtract the offset from the length of the string,
610 and return that. This would perhaps not be valid if we were dealing
611 with named arrays in addition to literal string constants. */
613 return size_diffop_loc (loc
, size_int (max
), offset_node
);
616 /* We have a known offset into the string. Start searching there for
617 a null character if we can represent it as a single HOST_WIDE_INT. */
618 if (offset_node
== 0)
620 else if (! tree_fits_shwi_p (offset_node
))
623 offset
= tree_to_shwi (offset_node
);
625 /* If the offset is known to be out of bounds, warn, and call strlen at
627 if (offset
< 0 || offset
> max
)
629 /* Suppress multiple warnings for propagated constant strings. */
631 && !TREE_NO_WARNING (src
))
633 warning_at (loc
, 0, "offset outside bounds of constant string");
634 TREE_NO_WARNING (src
) = 1;
639 /* Use strlen to search for the first zero byte. Since any strings
640 constructed with build_string will have nulls appended, we win even
641 if we get handed something like (char[4])"abcd".
643 Since OFFSET is our starting index into the string, no further
644 calculation is needed. */
645 return ssize_int (strlen (ptr
+ offset
));
648 /* Return a char pointer for a C string if it is a string constant
649 or sum of string constant and integer constant. */
656 src
= string_constant (src
, &offset_node
);
660 if (offset_node
== 0)
661 return TREE_STRING_POINTER (src
);
662 else if (!tree_fits_uhwi_p (offset_node
)
663 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
666 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);
669 /* Return a constant integer corresponding to target reading
670 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
673 c_readstr (const char *str
, machine_mode mode
)
677 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
679 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
680 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
681 / HOST_BITS_PER_WIDE_INT
;
683 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
684 for (i
= 0; i
< len
; i
++)
688 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
691 if (WORDS_BIG_ENDIAN
)
692 j
= GET_MODE_SIZE (mode
) - i
- 1;
693 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
694 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
695 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
699 ch
= (unsigned char) str
[i
];
700 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
703 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
704 return immed_wide_int_const (c
, mode
);
707 /* Cast a target constant CST to target CHAR and if that value fits into
708 host char type, return zero and put that value into variable pointed to by
712 target_char_cast (tree cst
, char *p
)
714 unsigned HOST_WIDE_INT val
, hostval
;
716 if (TREE_CODE (cst
) != INTEGER_CST
717 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
720 /* Do not care if it fits or not right here. */
721 val
= TREE_INT_CST_LOW (cst
);
723 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
724 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
727 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
728 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
737 /* Similar to save_expr, but assumes that arbitrary code is not executed
738 in between the multiple evaluations. In particular, we assume that a
739 non-addressable local variable will not be modified. */
742 builtin_save_expr (tree exp
)
744 if (TREE_CODE (exp
) == SSA_NAME
745 || (TREE_ADDRESSABLE (exp
) == 0
746 && (TREE_CODE (exp
) == PARM_DECL
747 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
750 return save_expr (exp
);
753 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
754 times to get the address of either a higher stack frame, or a return
755 address located within it (depending on FNDECL_CODE). */
758 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
761 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
764 /* For a zero count with __builtin_return_address, we don't care what
765 frame address we return, because target-specific definitions will
766 override us. Therefore frame pointer elimination is OK, and using
767 the soft frame pointer is OK.
769 For a nonzero count, or a zero count with __builtin_frame_address,
770 we require a stable offset from the current frame pointer to the
771 previous one, so we must use the hard frame pointer, and
772 we must disable frame pointer elimination. */
773 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
774 tem
= frame_pointer_rtx
;
777 tem
= hard_frame_pointer_rtx
;
779 /* Tell reload not to eliminate the frame pointer. */
780 crtl
->accesses_prior_frames
= 1;
785 SETUP_FRAME_ADDRESSES ();
787 /* On the SPARC, the return address is not in the frame, it is in a
788 register. There is no way to access it off of the current frame
789 pointer, but it can be accessed off the previous frame pointer by
790 reading the value from the register window save area. */
791 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
794 /* Scan back COUNT frames to the specified frame. */
795 for (i
= 0; i
< count
; i
++)
797 /* Assume the dynamic chain pointer is in the word that the
798 frame address points to, unless otherwise specified. */
799 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
800 tem
= memory_address (Pmode
, tem
);
801 tem
= gen_frame_mem (Pmode
, tem
);
802 tem
= copy_to_reg (tem
);
805 /* For __builtin_frame_address, return what we've got. But, on
806 the SPARC for example, we may have to add a bias. */
807 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
808 return FRAME_ADDR_RTX (tem
);
810 /* For __builtin_return_address, get the return address from that frame. */
811 #ifdef RETURN_ADDR_RTX
812 tem
= RETURN_ADDR_RTX (count
, tem
);
814 tem
= memory_address (Pmode
,
815 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
816 tem
= gen_frame_mem (Pmode
, tem
);
821 /* Alias set used for setjmp buffer. */
822 static alias_set_type setjmp_alias_set
= -1;
824 /* Construct the leading half of a __builtin_setjmp call. Control will
825 return to RECEIVER_LABEL. This is also called directly by the SJLJ
826 exception handling code. */
829 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
831 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
835 if (setjmp_alias_set
== -1)
836 setjmp_alias_set
= new_alias_set ();
838 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
840 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
842 /* We store the frame pointer and the address of receiver_label in
843 the buffer and use the rest of it for the stack save area, which
844 is machine-dependent. */
846 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
847 set_mem_alias_set (mem
, setjmp_alias_set
);
848 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
850 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
851 GET_MODE_SIZE (Pmode
))),
852 set_mem_alias_set (mem
, setjmp_alias_set
);
854 emit_move_insn (validize_mem (mem
),
855 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
857 stack_save
= gen_rtx_MEM (sa_mode
,
858 plus_constant (Pmode
, buf_addr
,
859 2 * GET_MODE_SIZE (Pmode
)));
860 set_mem_alias_set (stack_save
, setjmp_alias_set
);
861 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
863 /* If there is further processing to do, do it. */
864 if (targetm
.have_builtin_setjmp_setup ())
865 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
867 /* We have a nonlocal label. */
868 cfun
->has_nonlocal_label
= 1;
871 /* Construct the trailing part of a __builtin_setjmp call. This is
872 also called directly by the SJLJ exception handling code.
873 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
876 expand_builtin_setjmp_receiver (rtx receiver_label
)
880 /* Mark the FP as used when we get here, so we have to make sure it's
881 marked as used by this function. */
882 emit_use (hard_frame_pointer_rtx
);
884 /* Mark the static chain as clobbered here so life information
885 doesn't get messed up for it. */
886 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
887 if (chain
&& REG_P (chain
))
888 emit_clobber (chain
);
890 /* Now put in the code to restore the frame pointer, and argument
891 pointer, if needed. */
892 if (! targetm
.have_nonlocal_goto ())
894 /* First adjust our frame pointer to its actual value. It was
895 previously set to the start of the virtual area corresponding to
896 the stacked variables when we branched here and now needs to be
897 adjusted to the actual hardware fp value.
899 Assignments to virtual registers are converted by
900 instantiate_virtual_regs into the corresponding assignment
901 to the underlying register (fp in this case) that makes
902 the original assignment true.
903 So the following insn will actually be decrementing fp by
904 STARTING_FRAME_OFFSET. */
905 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
907 /* Restoring the frame pointer also modifies the hard frame pointer.
908 Mark it used (so that the previous assignment remains live once
909 the frame pointer is eliminated) and clobbered (to represent the
910 implicit update from the assignment). */
911 emit_use (hard_frame_pointer_rtx
);
912 emit_clobber (hard_frame_pointer_rtx
);
915 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
917 #ifdef ELIMINABLE_REGS
918 /* If the argument pointer can be eliminated in favor of the
919 frame pointer, we don't need to restore it. We assume here
920 that if such an elimination is present, it can always be used.
921 This is the case on all known machines; if we don't make this
922 assumption, we do unnecessary saving on many machines. */
924 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
926 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
927 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
928 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
931 if (i
== ARRAY_SIZE (elim_regs
))
934 /* Now restore our arg pointer from the address at which it
935 was saved in our stack frame. */
936 emit_move_insn (crtl
->args
.internal_arg_pointer
,
937 copy_to_reg (get_arg_pointer_save_area ()));
941 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
942 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
943 else if (targetm
.have_nonlocal_goto_receiver ())
944 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
948 /* We must not allow the code we just generated to be reordered by
949 scheduling. Specifically, the update of the frame pointer must
950 happen immediately, not later. */
951 emit_insn (gen_blockage ());
954 /* __builtin_longjmp is passed a pointer to an array of five words (not
955 all will be used on all machines). It operates similarly to the C
956 library function of the same name, but is more efficient. Much of
957 the code below is copied from the handling of non-local gotos. */
960 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
963 rtx_insn
*insn
, *last
;
964 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
966 /* DRAP is needed for stack realign if longjmp is expanded to current
968 if (SUPPORTS_STACK_ALIGNMENT
)
969 crtl
->need_drap
= true;
971 if (setjmp_alias_set
== -1)
972 setjmp_alias_set
= new_alias_set ();
974 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
976 buf_addr
= force_reg (Pmode
, buf_addr
);
978 /* We require that the user must pass a second argument of 1, because
979 that is what builtin_setjmp will return. */
980 gcc_assert (value
== const1_rtx
);
982 last
= get_last_insn ();
983 if (targetm
.have_builtin_longjmp ())
984 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
987 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
988 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
989 GET_MODE_SIZE (Pmode
)));
991 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
992 2 * GET_MODE_SIZE (Pmode
)));
993 set_mem_alias_set (fp
, setjmp_alias_set
);
994 set_mem_alias_set (lab
, setjmp_alias_set
);
995 set_mem_alias_set (stack
, setjmp_alias_set
);
997 /* Pick up FP, label, and SP from the block and jump. This code is
998 from expand_goto in stmt.c; see there for detailed comments. */
999 if (targetm
.have_nonlocal_goto ())
1000 /* We have to pass a value to the nonlocal_goto pattern that will
1001 get copied into the static_chain pointer, but it does not matter
1002 what that value is, because builtin_setjmp does not use it. */
1003 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1006 lab
= copy_to_reg (lab
);
1008 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1009 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1011 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1012 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1014 emit_use (hard_frame_pointer_rtx
);
1015 emit_use (stack_pointer_rtx
);
1016 emit_indirect_jump (lab
);
1020 /* Search backwards and mark the jump insn as a non-local goto.
1021 Note that this precludes the use of __builtin_longjmp to a
1022 __builtin_setjmp target in the same function. However, we've
1023 already cautioned the user that these functions are for
1024 internal exception handling use only. */
1025 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1027 gcc_assert (insn
!= last
);
1031 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1034 else if (CALL_P (insn
))
1040 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1042 return (iter
->i
< iter
->n
);
1045 /* This function validates the types of a function call argument list
1046 against a specified list of tree_codes. If the last specifier is a 0,
1047 that represents an ellipses, otherwise the last specifier must be a
1051 validate_arglist (const_tree callexpr
, ...)
1053 enum tree_code code
;
1056 const_call_expr_arg_iterator iter
;
1059 va_start (ap
, callexpr
);
1060 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1064 code
= (enum tree_code
) va_arg (ap
, int);
1068 /* This signifies an ellipses, any further arguments are all ok. */
1072 /* This signifies an endlink, if no arguments remain, return
1073 true, otherwise return false. */
1074 res
= !more_const_call_expr_args_p (&iter
);
1077 /* If no parameters remain or the parameter's code does not
1078 match the specified code, return false. Otherwise continue
1079 checking any remaining arguments. */
1080 arg
= next_const_call_expr_arg (&iter
);
1081 if (!validate_arg (arg
, code
))
1088 /* We need gotos here since we can only have one VA_CLOSE in a
1096 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1097 and the address of the save area. */
1100 expand_builtin_nonlocal_goto (tree exp
)
1102 tree t_label
, t_save_area
;
1103 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1106 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1109 t_label
= CALL_EXPR_ARG (exp
, 0);
1110 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1112 r_label
= expand_normal (t_label
);
1113 r_label
= convert_memory_address (Pmode
, r_label
);
1114 r_save_area
= expand_normal (t_save_area
);
1115 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1116 /* Copy the address of the save location to a register just in case it was
1117 based on the frame pointer. */
1118 r_save_area
= copy_to_reg (r_save_area
);
1119 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1120 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1121 plus_constant (Pmode
, r_save_area
,
1122 GET_MODE_SIZE (Pmode
)));
1124 crtl
->has_nonlocal_goto
= 1;
1126 /* ??? We no longer need to pass the static chain value, afaik. */
1127 if (targetm
.have_nonlocal_goto ())
1128 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1131 r_label
= copy_to_reg (r_label
);
1133 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1134 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1136 /* Restore frame pointer for containing function. */
1137 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1138 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1140 /* USE of hard_frame_pointer_rtx added for consistency;
1141 not clear if really needed. */
1142 emit_use (hard_frame_pointer_rtx
);
1143 emit_use (stack_pointer_rtx
);
1145 /* If the architecture is using a GP register, we must
1146 conservatively assume that the target function makes use of it.
1147 The prologue of functions with nonlocal gotos must therefore
1148 initialize the GP register to the appropriate value, and we
1149 must then make sure that this value is live at the point
1150 of the jump. (Note that this doesn't necessarily apply
1151 to targets with a nonlocal_goto pattern; they are free
1152 to implement it in their own way. Note also that this is
1153 a no-op if the GP register is a global invariant.) */
1154 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1155 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1156 emit_use (pic_offset_table_rtx
);
1158 emit_indirect_jump (r_label
);
1161 /* Search backwards to the jump insn and mark it as a
1163 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1167 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1170 else if (CALL_P (insn
))
1177 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1178 (not all will be used on all machines) that was passed to __builtin_setjmp.
1179 It updates the stack pointer in that block to the current value. This is
1180 also called directly by the SJLJ exception handling code. */
1183 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1185 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1187 = gen_rtx_MEM (sa_mode
,
1190 plus_constant (Pmode
, buf_addr
,
1191 2 * GET_MODE_SIZE (Pmode
))));
1193 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1196 /* Expand a call to __builtin_prefetch. For a target that does not support
1197 data prefetch, evaluate the memory address argument in case it has side
1201 expand_builtin_prefetch (tree exp
)
1203 tree arg0
, arg1
, arg2
;
1207 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1210 arg0
= CALL_EXPR_ARG (exp
, 0);
1212 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1213 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1215 nargs
= call_expr_nargs (exp
);
1217 arg1
= CALL_EXPR_ARG (exp
, 1);
1219 arg1
= integer_zero_node
;
1221 arg2
= CALL_EXPR_ARG (exp
, 2);
1223 arg2
= integer_three_node
;
1225 /* Argument 0 is an address. */
1226 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1228 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1229 if (TREE_CODE (arg1
) != INTEGER_CST
)
1231 error ("second argument to %<__builtin_prefetch%> must be a constant");
1232 arg1
= integer_zero_node
;
1234 op1
= expand_normal (arg1
);
1235 /* Argument 1 must be either zero or one. */
1236 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1238 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1243 /* Argument 2 (locality) must be a compile-time constant int. */
1244 if (TREE_CODE (arg2
) != INTEGER_CST
)
1246 error ("third argument to %<__builtin_prefetch%> must be a constant");
1247 arg2
= integer_zero_node
;
1249 op2
= expand_normal (arg2
);
1250 /* Argument 2 must be 0, 1, 2, or 3. */
1251 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1253 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1257 if (targetm
.have_prefetch ())
1259 struct expand_operand ops
[3];
1261 create_address_operand (&ops
[0], op0
);
1262 create_integer_operand (&ops
[1], INTVAL (op1
));
1263 create_integer_operand (&ops
[2], INTVAL (op2
));
1264 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1268 /* Don't do anything with direct references to volatile memory, but
1269 generate code to handle other side effects. */
1270 if (!MEM_P (op0
) && side_effects_p (op0
))
1274 /* Get a MEM rtx for expression EXP which is the address of an operand
1275 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1276 the maximum length of the block of memory that might be accessed or
1280 get_memory_rtx (tree exp
, tree len
)
1282 tree orig_exp
= exp
;
1285 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1286 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1287 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1288 exp
= TREE_OPERAND (exp
, 0);
1290 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1291 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1293 /* Get an expression we can use to find the attributes to assign to MEM.
1294 First remove any nops. */
1295 while (CONVERT_EXPR_P (exp
)
1296 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1297 exp
= TREE_OPERAND (exp
, 0);
1299 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1300 (as builtin stringops may alias with anything). */
1301 exp
= fold_build2 (MEM_REF
,
1302 build_array_type (char_type_node
,
1303 build_range_type (sizetype
,
1304 size_one_node
, len
)),
1305 exp
, build_int_cst (ptr_type_node
, 0));
1307 /* If the MEM_REF has no acceptable address, try to get the base object
1308 from the original address we got, and build an all-aliasing
1309 unknown-sized access to that one. */
1310 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1311 set_mem_attributes (mem
, exp
, 0);
1312 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1313 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1316 exp
= build_fold_addr_expr (exp
);
1317 exp
= fold_build2 (MEM_REF
,
1318 build_array_type (char_type_node
,
1319 build_range_type (sizetype
,
1322 exp
, build_int_cst (ptr_type_node
, 0));
1323 set_mem_attributes (mem
, exp
, 0);
1325 set_mem_alias_set (mem
, 0);
1329 /* Built-in functions to perform an untyped call and return. */
1331 #define apply_args_mode \
1332 (this_target_builtins->x_apply_args_mode)
1333 #define apply_result_mode \
1334 (this_target_builtins->x_apply_result_mode)
1336 /* Return the size required for the block returned by __builtin_apply_args,
1337 and initialize apply_args_mode. */
1340 apply_args_size (void)
1342 static int size
= -1;
1347 /* The values computed by this function never change. */
1350 /* The first value is the incoming arg-pointer. */
1351 size
= GET_MODE_SIZE (Pmode
);
1353 /* The second value is the structure value address unless this is
1354 passed as an "invisible" first argument. */
1355 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1356 size
+= GET_MODE_SIZE (Pmode
);
1358 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1359 if (FUNCTION_ARG_REGNO_P (regno
))
1361 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1363 gcc_assert (mode
!= VOIDmode
);
1365 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1366 if (size
% align
!= 0)
1367 size
= CEIL (size
, align
) * align
;
1368 size
+= GET_MODE_SIZE (mode
);
1369 apply_args_mode
[regno
] = mode
;
1373 apply_args_mode
[regno
] = VOIDmode
;
1379 /* Return the size required for the block returned by __builtin_apply,
1380 and initialize apply_result_mode. */
1383 apply_result_size (void)
1385 static int size
= -1;
1389 /* The values computed by this function never change. */
1394 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1395 if (targetm
.calls
.function_value_regno_p (regno
))
1397 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1399 gcc_assert (mode
!= VOIDmode
);
1401 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1402 if (size
% align
!= 0)
1403 size
= CEIL (size
, align
) * align
;
1404 size
+= GET_MODE_SIZE (mode
);
1405 apply_result_mode
[regno
] = mode
;
1408 apply_result_mode
[regno
] = VOIDmode
;
1410 /* Allow targets that use untyped_call and untyped_return to override
1411 the size so that machine-specific information can be stored here. */
1412 #ifdef APPLY_RESULT_SIZE
1413 size
= APPLY_RESULT_SIZE
;
1419 /* Create a vector describing the result block RESULT. If SAVEP is true,
1420 the result block is used to save the values; otherwise it is used to
1421 restore the values. */
1424 result_vector (int savep
, rtx result
)
1426 int regno
, size
, align
, nelts
;
1429 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1432 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1433 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1435 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1436 if (size
% align
!= 0)
1437 size
= CEIL (size
, align
) * align
;
1438 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1439 mem
= adjust_address (result
, mode
, size
);
1440 savevec
[nelts
++] = (savep
1441 ? gen_rtx_SET (mem
, reg
)
1442 : gen_rtx_SET (reg
, mem
));
1443 size
+= GET_MODE_SIZE (mode
);
1445 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1448 /* Save the state required to perform an untyped call with the same
1449 arguments as were passed to the current function. */
1452 expand_builtin_apply_args_1 (void)
1455 int size
, align
, regno
;
1457 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1459 /* Create a block where the arg-pointer, structure value address,
1460 and argument registers can be saved. */
1461 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1463 /* Walk past the arg-pointer and structure value address. */
1464 size
= GET_MODE_SIZE (Pmode
);
1465 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1466 size
+= GET_MODE_SIZE (Pmode
);
1468 /* Save each register used in calling a function to the block. */
1469 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1470 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1472 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1473 if (size
% align
!= 0)
1474 size
= CEIL (size
, align
) * align
;
1476 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1478 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1479 size
+= GET_MODE_SIZE (mode
);
1482 /* Save the arg pointer to the block. */
1483 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1484 /* We need the pointer as the caller actually passed them to us, not
1485 as we might have pretended they were passed. Make sure it's a valid
1486 operand, as emit_move_insn isn't expected to handle a PLUS. */
1487 if (STACK_GROWS_DOWNWARD
)
1489 = force_operand (plus_constant (Pmode
, tem
,
1490 crtl
->args
.pretend_args_size
),
1492 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1494 size
= GET_MODE_SIZE (Pmode
);
1496 /* Save the structure value address unless this is passed as an
1497 "invisible" first argument. */
1498 if (struct_incoming_value
)
1500 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1501 copy_to_reg (struct_incoming_value
));
1502 size
+= GET_MODE_SIZE (Pmode
);
1505 /* Return the address of the block. */
1506 return copy_addr_to_reg (XEXP (registers
, 0));
1509 /* __builtin_apply_args returns block of memory allocated on
1510 the stack into which is stored the arg pointer, structure
1511 value address, static chain, and all the registers that might
1512 possibly be used in performing a function call. The code is
1513 moved to the start of the function so the incoming values are
1517 expand_builtin_apply_args (void)
1519 /* Don't do __builtin_apply_args more than once in a function.
1520 Save the result of the first call and reuse it. */
1521 if (apply_args_value
!= 0)
1522 return apply_args_value
;
1524 /* When this function is called, it means that registers must be
1525 saved on entry to this function. So we migrate the
1526 call to the first insn of this function. */
1530 temp
= expand_builtin_apply_args_1 ();
1531 rtx_insn
*seq
= get_insns ();
1534 apply_args_value
= temp
;
1536 /* Put the insns after the NOTE that starts the function.
1537 If this is inside a start_sequence, make the outer-level insn
1538 chain current, so the code is placed at the start of the
1539 function. If internal_arg_pointer is a non-virtual pseudo,
1540 it needs to be placed after the function that initializes
1542 push_topmost_sequence ();
1543 if (REG_P (crtl
->args
.internal_arg_pointer
)
1544 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1545 emit_insn_before (seq
, parm_birth_insn
);
1547 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1548 pop_topmost_sequence ();
1553 /* Perform an untyped call and save the state required to perform an
1554 untyped return of whatever value was returned by the given function. */
1557 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1559 int size
, align
, regno
;
1561 rtx incoming_args
, result
, reg
, dest
, src
;
1562 rtx_call_insn
*call_insn
;
1563 rtx old_stack_level
= 0;
1564 rtx call_fusage
= 0;
1565 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1567 arguments
= convert_memory_address (Pmode
, arguments
);
1569 /* Create a block where the return registers can be saved. */
1570 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1572 /* Fetch the arg pointer from the ARGUMENTS block. */
1573 incoming_args
= gen_reg_rtx (Pmode
);
1574 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1575 if (!STACK_GROWS_DOWNWARD
)
1576 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1577 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1579 /* Push a new argument block and copy the arguments. Do not allow
1580 the (potential) memcpy call below to interfere with our stack
1582 do_pending_stack_adjust ();
1585 /* Save the stack with nonlocal if available. */
1586 if (targetm
.have_save_stack_nonlocal ())
1587 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1589 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1591 /* Allocate a block of memory onto the stack and copy the memory
1592 arguments to the outgoing arguments address. We can pass TRUE
1593 as the 4th argument because we just saved the stack pointer
1594 and will restore it right after the call. */
1595 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1597 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1598 may have already set current_function_calls_alloca to true.
1599 current_function_calls_alloca won't be set if argsize is zero,
1600 so we have to guarantee need_drap is true here. */
1601 if (SUPPORTS_STACK_ALIGNMENT
)
1602 crtl
->need_drap
= true;
1604 dest
= virtual_outgoing_args_rtx
;
1605 if (!STACK_GROWS_DOWNWARD
)
1607 if (CONST_INT_P (argsize
))
1608 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1610 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1612 dest
= gen_rtx_MEM (BLKmode
, dest
);
1613 set_mem_align (dest
, PARM_BOUNDARY
);
1614 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1615 set_mem_align (src
, PARM_BOUNDARY
);
1616 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1618 /* Refer to the argument block. */
1620 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1621 set_mem_align (arguments
, PARM_BOUNDARY
);
1623 /* Walk past the arg-pointer and structure value address. */
1624 size
= GET_MODE_SIZE (Pmode
);
1626 size
+= GET_MODE_SIZE (Pmode
);
1628 /* Restore each of the registers previously saved. Make USE insns
1629 for each of these registers for use in making the call. */
1630 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1631 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1633 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1634 if (size
% align
!= 0)
1635 size
= CEIL (size
, align
) * align
;
1636 reg
= gen_rtx_REG (mode
, regno
);
1637 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1638 use_reg (&call_fusage
, reg
);
1639 size
+= GET_MODE_SIZE (mode
);
1642 /* Restore the structure value address unless this is passed as an
1643 "invisible" first argument. */
1644 size
= GET_MODE_SIZE (Pmode
);
1647 rtx value
= gen_reg_rtx (Pmode
);
1648 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1649 emit_move_insn (struct_value
, value
);
1650 if (REG_P (struct_value
))
1651 use_reg (&call_fusage
, struct_value
);
1652 size
+= GET_MODE_SIZE (Pmode
);
1655 /* All arguments and registers used for the call are set up by now! */
1656 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1658 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1659 and we don't want to load it into a register as an optimization,
1660 because prepare_call_address already did it if it should be done. */
1661 if (GET_CODE (function
) != SYMBOL_REF
)
1662 function
= memory_address (FUNCTION_MODE
, function
);
1664 /* Generate the actual call instruction and save the return value. */
1665 if (targetm
.have_untyped_call ())
1667 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1668 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1669 result_vector (1, result
)));
1671 else if (targetm
.have_call_value ())
1675 /* Locate the unique return register. It is not possible to
1676 express a call that sets more than one return register using
1677 call_value; use untyped_call for that. In fact, untyped_call
1678 only needs to save the return registers in the given block. */
1679 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1680 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1682 gcc_assert (!valreg
); /* have_untyped_call required. */
1684 valreg
= gen_rtx_REG (mode
, regno
);
1687 emit_insn (targetm
.gen_call_value (valreg
,
1688 gen_rtx_MEM (FUNCTION_MODE
, function
),
1689 const0_rtx
, NULL_RTX
, const0_rtx
));
1691 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1696 /* Find the CALL insn we just emitted, and attach the register usage
1698 call_insn
= last_call_insn ();
1699 add_function_usage_to (call_insn
, call_fusage
);
1701 /* Restore the stack. */
1702 if (targetm
.have_save_stack_nonlocal ())
1703 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1705 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1706 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1710 /* Return the address of the result block. */
1711 result
= copy_addr_to_reg (XEXP (result
, 0));
1712 return convert_memory_address (ptr_mode
, result
);
1715 /* Perform an untyped return. */
1718 expand_builtin_return (rtx result
)
1720 int size
, align
, regno
;
1723 rtx_insn
*call_fusage
= 0;
1725 result
= convert_memory_address (Pmode
, result
);
1727 apply_result_size ();
1728 result
= gen_rtx_MEM (BLKmode
, result
);
1730 if (targetm
.have_untyped_return ())
1732 rtx vector
= result_vector (0, result
);
1733 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1738 /* Restore the return value and note that each value is used. */
1740 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1741 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1743 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1744 if (size
% align
!= 0)
1745 size
= CEIL (size
, align
) * align
;
1746 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1747 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1749 push_to_sequence (call_fusage
);
1751 call_fusage
= get_insns ();
1753 size
+= GET_MODE_SIZE (mode
);
1756 /* Put the USE insns before the return. */
1757 emit_insn (call_fusage
);
1759 /* Return whatever values was restored by jumping directly to the end
1761 expand_naked_return ();
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1766 static enum type_class
1767 type_to_class (tree type
)
1769 switch (TREE_CODE (type
))
1771 case VOID_TYPE
: return void_type_class
;
1772 case INTEGER_TYPE
: return integer_type_class
;
1773 case ENUMERAL_TYPE
: return enumeral_type_class
;
1774 case BOOLEAN_TYPE
: return boolean_type_class
;
1775 case POINTER_TYPE
: return pointer_type_class
;
1776 case REFERENCE_TYPE
: return reference_type_class
;
1777 case OFFSET_TYPE
: return offset_type_class
;
1778 case REAL_TYPE
: return real_type_class
;
1779 case COMPLEX_TYPE
: return complex_type_class
;
1780 case FUNCTION_TYPE
: return function_type_class
;
1781 case METHOD_TYPE
: return method_type_class
;
1782 case RECORD_TYPE
: return record_type_class
;
1784 case QUAL_UNION_TYPE
: return union_type_class
;
1785 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1786 ? string_type_class
: array_type_class
);
1787 case LANG_TYPE
: return lang_type_class
;
1788 default: return no_type_class
;
1792 /* Expand a call EXP to __builtin_classify_type. */
1795 expand_builtin_classify_type (tree exp
)
1797 if (call_expr_nargs (exp
))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1799 return GEN_INT (no_type_class
);
1802 /* This helper macro, meant to be used in mathfn_built_in below,
1803 determines which among a set of three builtin math functions is
1804 appropriate for a given type mode. The `F' and `L' cases are
1805 automatically generated from the `double' case. */
1806 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1807 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1808 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1809 fcodel = BUILT_IN_MATHFN##L ; break;
1810 /* Similar to above, but appends _R after any F/L suffix. */
1811 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1812 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1813 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1814 fcodel = BUILT_IN_MATHFN##L_R ; break;
1816 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1817 if available. If IMPLICIT is true use the implicit builtin declaration,
1818 otherwise use the explicit declaration. If we can't do the conversion,
1822 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1824 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1828 CASE_MATHFN (BUILT_IN_ACOS
)
1829 CASE_MATHFN (BUILT_IN_ACOSH
)
1830 CASE_MATHFN (BUILT_IN_ASIN
)
1831 CASE_MATHFN (BUILT_IN_ASINH
)
1832 CASE_MATHFN (BUILT_IN_ATAN
)
1833 CASE_MATHFN (BUILT_IN_ATAN2
)
1834 CASE_MATHFN (BUILT_IN_ATANH
)
1835 CASE_MATHFN (BUILT_IN_CBRT
)
1836 CASE_MATHFN (BUILT_IN_CEIL
)
1837 CASE_MATHFN (BUILT_IN_CEXPI
)
1838 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1839 CASE_MATHFN (BUILT_IN_COS
)
1840 CASE_MATHFN (BUILT_IN_COSH
)
1841 CASE_MATHFN (BUILT_IN_DREM
)
1842 CASE_MATHFN (BUILT_IN_ERF
)
1843 CASE_MATHFN (BUILT_IN_ERFC
)
1844 CASE_MATHFN (BUILT_IN_EXP
)
1845 CASE_MATHFN (BUILT_IN_EXP10
)
1846 CASE_MATHFN (BUILT_IN_EXP2
)
1847 CASE_MATHFN (BUILT_IN_EXPM1
)
1848 CASE_MATHFN (BUILT_IN_FABS
)
1849 CASE_MATHFN (BUILT_IN_FDIM
)
1850 CASE_MATHFN (BUILT_IN_FLOOR
)
1851 CASE_MATHFN (BUILT_IN_FMA
)
1852 CASE_MATHFN (BUILT_IN_FMAX
)
1853 CASE_MATHFN (BUILT_IN_FMIN
)
1854 CASE_MATHFN (BUILT_IN_FMOD
)
1855 CASE_MATHFN (BUILT_IN_FREXP
)
1856 CASE_MATHFN (BUILT_IN_GAMMA
)
1857 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1858 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1859 CASE_MATHFN (BUILT_IN_HYPOT
)
1860 CASE_MATHFN (BUILT_IN_ILOGB
)
1861 CASE_MATHFN (BUILT_IN_ICEIL
)
1862 CASE_MATHFN (BUILT_IN_IFLOOR
)
1863 CASE_MATHFN (BUILT_IN_INF
)
1864 CASE_MATHFN (BUILT_IN_IRINT
)
1865 CASE_MATHFN (BUILT_IN_IROUND
)
1866 CASE_MATHFN (BUILT_IN_ISINF
)
1867 CASE_MATHFN (BUILT_IN_J0
)
1868 CASE_MATHFN (BUILT_IN_J1
)
1869 CASE_MATHFN (BUILT_IN_JN
)
1870 CASE_MATHFN (BUILT_IN_LCEIL
)
1871 CASE_MATHFN (BUILT_IN_LDEXP
)
1872 CASE_MATHFN (BUILT_IN_LFLOOR
)
1873 CASE_MATHFN (BUILT_IN_LGAMMA
)
1874 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1875 CASE_MATHFN (BUILT_IN_LLCEIL
)
1876 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1877 CASE_MATHFN (BUILT_IN_LLRINT
)
1878 CASE_MATHFN (BUILT_IN_LLROUND
)
1879 CASE_MATHFN (BUILT_IN_LOG
)
1880 CASE_MATHFN (BUILT_IN_LOG10
)
1881 CASE_MATHFN (BUILT_IN_LOG1P
)
1882 CASE_MATHFN (BUILT_IN_LOG2
)
1883 CASE_MATHFN (BUILT_IN_LOGB
)
1884 CASE_MATHFN (BUILT_IN_LRINT
)
1885 CASE_MATHFN (BUILT_IN_LROUND
)
1886 CASE_MATHFN (BUILT_IN_MODF
)
1887 CASE_MATHFN (BUILT_IN_NAN
)
1888 CASE_MATHFN (BUILT_IN_NANS
)
1889 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1890 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1891 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1892 CASE_MATHFN (BUILT_IN_POW
)
1893 CASE_MATHFN (BUILT_IN_POWI
)
1894 CASE_MATHFN (BUILT_IN_POW10
)
1895 CASE_MATHFN (BUILT_IN_REMAINDER
)
1896 CASE_MATHFN (BUILT_IN_REMQUO
)
1897 CASE_MATHFN (BUILT_IN_RINT
)
1898 CASE_MATHFN (BUILT_IN_ROUND
)
1899 CASE_MATHFN (BUILT_IN_SCALB
)
1900 CASE_MATHFN (BUILT_IN_SCALBLN
)
1901 CASE_MATHFN (BUILT_IN_SCALBN
)
1902 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1903 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1904 CASE_MATHFN (BUILT_IN_SIN
)
1905 CASE_MATHFN (BUILT_IN_SINCOS
)
1906 CASE_MATHFN (BUILT_IN_SINH
)
1907 CASE_MATHFN (BUILT_IN_SQRT
)
1908 CASE_MATHFN (BUILT_IN_TAN
)
1909 CASE_MATHFN (BUILT_IN_TANH
)
1910 CASE_MATHFN (BUILT_IN_TGAMMA
)
1911 CASE_MATHFN (BUILT_IN_TRUNC
)
1912 CASE_MATHFN (BUILT_IN_Y0
)
1913 CASE_MATHFN (BUILT_IN_Y1
)
1914 CASE_MATHFN (BUILT_IN_YN
)
1920 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1922 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1924 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1929 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1932 return builtin_decl_explicit (fcode2
);
1935 /* Like mathfn_built_in_1(), but always use the implicit array. */
1938 mathfn_built_in (tree type
, enum built_in_function fn
)
1940 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1943 /* If errno must be maintained, expand the RTL to check if the result,
1944 TARGET, of a built-in function call, EXP, is NaN, and if so set
1948 expand_errno_check (tree exp
, rtx target
)
1950 rtx_code_label
*lab
= gen_label_rtx ();
1952 /* Test the result; if it is NaN, set errno=EDOM because
1953 the argument was not in the domain. */
1954 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1955 NULL_RTX
, NULL
, lab
,
1956 /* The jump is very likely. */
1957 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1960 /* If this built-in doesn't throw an exception, set errno directly. */
1961 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1963 #ifdef GEN_ERRNO_RTX
1964 rtx errno_rtx
= GEN_ERRNO_RTX
;
1967 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1969 emit_move_insn (errno_rtx
,
1970 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
1976 /* Make sure the library call isn't expanded as a tail call. */
1977 CALL_EXPR_TAILCALL (exp
) = 0;
1979 /* We can't set errno=EDOM directly; let the library call do it.
1980 Pop the arguments right away in case the call gets deleted. */
1982 expand_call (exp
, target
, 0);
1987 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1988 Return NULL_RTX if a normal call should be emitted rather than expanding
1989 the function in-line. EXP is the expression that is a call to the builtin
1990 function; if convenient, the result should be placed in TARGET.
1991 SUBTARGET may be used as the target for computing one of EXP's operands. */
1994 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1996 optab builtin_optab
;
1999 tree fndecl
= get_callee_fndecl (exp
);
2001 bool errno_set
= false;
2002 bool try_widening
= false;
2005 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2008 arg
= CALL_EXPR_ARG (exp
, 0);
2010 switch (DECL_FUNCTION_CODE (fndecl
))
2012 CASE_FLT_FN (BUILT_IN_SQRT
):
2013 errno_set
= ! tree_expr_nonnegative_p (arg
);
2014 try_widening
= true;
2015 builtin_optab
= sqrt_optab
;
2017 CASE_FLT_FN (BUILT_IN_EXP
):
2018 errno_set
= true; builtin_optab
= exp_optab
; break;
2019 CASE_FLT_FN (BUILT_IN_EXP10
):
2020 CASE_FLT_FN (BUILT_IN_POW10
):
2021 errno_set
= true; builtin_optab
= exp10_optab
; break;
2022 CASE_FLT_FN (BUILT_IN_EXP2
):
2023 errno_set
= true; builtin_optab
= exp2_optab
; break;
2024 CASE_FLT_FN (BUILT_IN_EXPM1
):
2025 errno_set
= true; builtin_optab
= expm1_optab
; break;
2026 CASE_FLT_FN (BUILT_IN_LOGB
):
2027 errno_set
= true; builtin_optab
= logb_optab
; break;
2028 CASE_FLT_FN (BUILT_IN_LOG
):
2029 errno_set
= true; builtin_optab
= log_optab
; break;
2030 CASE_FLT_FN (BUILT_IN_LOG10
):
2031 errno_set
= true; builtin_optab
= log10_optab
; break;
2032 CASE_FLT_FN (BUILT_IN_LOG2
):
2033 errno_set
= true; builtin_optab
= log2_optab
; break;
2034 CASE_FLT_FN (BUILT_IN_LOG1P
):
2035 errno_set
= true; builtin_optab
= log1p_optab
; break;
2036 CASE_FLT_FN (BUILT_IN_ASIN
):
2037 builtin_optab
= asin_optab
; break;
2038 CASE_FLT_FN (BUILT_IN_ACOS
):
2039 builtin_optab
= acos_optab
; break;
2040 CASE_FLT_FN (BUILT_IN_TAN
):
2041 builtin_optab
= tan_optab
; break;
2042 CASE_FLT_FN (BUILT_IN_ATAN
):
2043 builtin_optab
= atan_optab
; break;
2044 CASE_FLT_FN (BUILT_IN_FLOOR
):
2045 builtin_optab
= floor_optab
; break;
2046 CASE_FLT_FN (BUILT_IN_CEIL
):
2047 builtin_optab
= ceil_optab
; break;
2048 CASE_FLT_FN (BUILT_IN_TRUNC
):
2049 builtin_optab
= btrunc_optab
; break;
2050 CASE_FLT_FN (BUILT_IN_ROUND
):
2051 builtin_optab
= round_optab
; break;
2052 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2053 builtin_optab
= nearbyint_optab
;
2054 if (flag_trapping_math
)
2056 /* Else fallthrough and expand as rint. */
2057 CASE_FLT_FN (BUILT_IN_RINT
):
2058 builtin_optab
= rint_optab
; break;
2059 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2060 builtin_optab
= significand_optab
; break;
2065 /* Make a suitable register to place result in. */
2066 mode
= TYPE_MODE (TREE_TYPE (exp
));
2068 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2071 /* Before working hard, check whether the instruction is available, but try
2072 to widen the mode for specific operations. */
2073 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2074 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2075 && (!errno_set
|| !optimize_insn_for_size_p ()))
2077 rtx result
= gen_reg_rtx (mode
);
2079 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2080 need to expand the argument again. This way, we will not perform
2081 side-effects more the once. */
2082 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2084 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2088 /* Compute into RESULT.
2089 Set RESULT to wherever the result comes back. */
2090 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2095 expand_errno_check (exp
, result
);
2097 /* Output the entire sequence. */
2098 insns
= get_insns ();
2104 /* If we were unable to expand via the builtin, stop the sequence
2105 (without outputting the insns) and call to the library function
2106 with the stabilized argument list. */
2110 return expand_call (exp
, target
, target
== const0_rtx
);
2113 /* Expand a call to the builtin binary math functions (pow and atan2).
2114 Return NULL_RTX if a normal call should be emitted rather than expanding the
2115 function in-line. EXP is the expression that is a call to the builtin
2116 function; if convenient, the result should be placed in TARGET.
2117 SUBTARGET may be used as the target for computing one of EXP's
2121 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2123 optab builtin_optab
;
2124 rtx op0
, op1
, result
;
2126 int op1_type
= REAL_TYPE
;
2127 tree fndecl
= get_callee_fndecl (exp
);
2130 bool errno_set
= true;
2132 switch (DECL_FUNCTION_CODE (fndecl
))
2134 CASE_FLT_FN (BUILT_IN_SCALBN
):
2135 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2136 CASE_FLT_FN (BUILT_IN_LDEXP
):
2137 op1_type
= INTEGER_TYPE
;
2142 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2145 arg0
= CALL_EXPR_ARG (exp
, 0);
2146 arg1
= CALL_EXPR_ARG (exp
, 1);
2148 switch (DECL_FUNCTION_CODE (fndecl
))
2150 CASE_FLT_FN (BUILT_IN_POW
):
2151 builtin_optab
= pow_optab
; break;
2152 CASE_FLT_FN (BUILT_IN_ATAN2
):
2153 builtin_optab
= atan2_optab
; break;
2154 CASE_FLT_FN (BUILT_IN_SCALB
):
2155 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2157 builtin_optab
= scalb_optab
; break;
2158 CASE_FLT_FN (BUILT_IN_SCALBN
):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2160 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2162 /* Fall through... */
2163 CASE_FLT_FN (BUILT_IN_LDEXP
):
2164 builtin_optab
= ldexp_optab
; break;
2165 CASE_FLT_FN (BUILT_IN_FMOD
):
2166 builtin_optab
= fmod_optab
; break;
2167 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2168 CASE_FLT_FN (BUILT_IN_DREM
):
2169 builtin_optab
= remainder_optab
; break;
2174 /* Make a suitable register to place result in. */
2175 mode
= TYPE_MODE (TREE_TYPE (exp
));
2177 /* Before working hard, check whether the instruction is available. */
2178 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2181 result
= gen_reg_rtx (mode
);
2183 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2186 if (errno_set
&& optimize_insn_for_size_p ())
2189 /* Always stabilize the argument list. */
2190 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2191 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2193 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2194 op1
= expand_normal (arg1
);
2198 /* Compute into RESULT.
2199 Set RESULT to wherever the result comes back. */
2200 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2201 result
, 0, OPTAB_DIRECT
);
2203 /* If we were unable to expand via the builtin, stop the sequence
2204 (without outputting the insns) and call to the library function
2205 with the stabilized argument list. */
2209 return expand_call (exp
, target
, target
== const0_rtx
);
2213 expand_errno_check (exp
, result
);
2215 /* Output the entire sequence. */
2216 insns
= get_insns ();
2223 /* Expand a call to the builtin trinary math functions (fma).
2224 Return NULL_RTX if a normal call should be emitted rather than expanding the
2225 function in-line. EXP is the expression that is a call to the builtin
2226 function; if convenient, the result should be placed in TARGET.
2227 SUBTARGET may be used as the target for computing one of EXP's
2231 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2233 optab builtin_optab
;
2234 rtx op0
, op1
, op2
, result
;
2236 tree fndecl
= get_callee_fndecl (exp
);
2237 tree arg0
, arg1
, arg2
;
2240 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2243 arg0
= CALL_EXPR_ARG (exp
, 0);
2244 arg1
= CALL_EXPR_ARG (exp
, 1);
2245 arg2
= CALL_EXPR_ARG (exp
, 2);
2247 switch (DECL_FUNCTION_CODE (fndecl
))
2249 CASE_FLT_FN (BUILT_IN_FMA
):
2250 builtin_optab
= fma_optab
; break;
2255 /* Make a suitable register to place result in. */
2256 mode
= TYPE_MODE (TREE_TYPE (exp
));
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2262 result
= gen_reg_rtx (mode
);
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2266 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2267 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2269 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2270 op1
= expand_normal (arg1
);
2271 op2
= expand_normal (arg2
);
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call to the library function
2282 with the stabilized argument list. */
2286 return expand_call (exp
, target
, target
== const0_rtx
);
2289 /* Output the entire sequence. */
2290 insns
= get_insns ();
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2305 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2307 optab builtin_optab
;
2310 tree fndecl
= get_callee_fndecl (exp
);
2314 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2317 arg
= CALL_EXPR_ARG (exp
, 0);
2319 switch (DECL_FUNCTION_CODE (fndecl
))
2321 CASE_FLT_FN (BUILT_IN_SIN
):
2322 CASE_FLT_FN (BUILT_IN_COS
):
2323 builtin_optab
= sincos_optab
; break;
2328 /* Make a suitable register to place result in. */
2329 mode
= TYPE_MODE (TREE_TYPE (exp
));
2331 /* Check if sincos insn is available, otherwise fallback
2332 to sin or cos insn. */
2333 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2334 switch (DECL_FUNCTION_CODE (fndecl
))
2336 CASE_FLT_FN (BUILT_IN_SIN
):
2337 builtin_optab
= sin_optab
; break;
2338 CASE_FLT_FN (BUILT_IN_COS
):
2339 builtin_optab
= cos_optab
; break;
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2347 rtx result
= gen_reg_rtx (mode
);
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side-effects more the once. */
2352 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2354 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab
== sincos_optab
)
2364 switch (DECL_FUNCTION_CODE (fndecl
))
2366 CASE_FLT_FN (BUILT_IN_SIN
):
2367 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2369 CASE_FLT_FN (BUILT_IN_COS
):
2370 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2378 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2382 /* Output the entire sequence. */
2383 insns
= get_insns ();
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call to the library function
2391 with the stabilized argument list. */
2395 return expand_call (exp
, target
, target
== const0_rtx
);
2398 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg
, tree fndecl
)
2405 bool errno_set
= false;
2406 optab builtin_optab
= unknown_optab
;
2409 switch (DECL_FUNCTION_CODE (fndecl
))
2411 CASE_FLT_FN (BUILT_IN_ILOGB
):
2412 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF
):
2414 builtin_optab
= isinf_optab
; break;
2415 case BUILT_IN_ISNORMAL
:
2416 case BUILT_IN_ISFINITE
:
2417 CASE_FLT_FN (BUILT_IN_FINITE
):
2418 case BUILT_IN_FINITED32
:
2419 case BUILT_IN_FINITED64
:
2420 case BUILT_IN_FINITED128
:
2421 case BUILT_IN_ISINFD32
:
2422 case BUILT_IN_ISINFD64
:
2423 case BUILT_IN_ISINFD128
:
2424 /* These builtins have no optabs (yet). */
2430 /* There's no easy way to detect the case we need to set EDOM. */
2431 if (flag_errno_math
&& errno_set
)
2432 return CODE_FOR_nothing
;
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode
= TYPE_MODE (TREE_TYPE (arg
));
2438 return optab_handler (builtin_optab
, mode
);
2439 return CODE_FOR_nothing
;
2442 /* Expand a call to one of the builtin math functions that operate on
2443 floating point argument and output an integer result (ilogb, isinf,
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
2450 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2452 enum insn_code icode
= CODE_FOR_nothing
;
2454 tree fndecl
= get_callee_fndecl (exp
);
2458 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2461 arg
= CALL_EXPR_ARG (exp
, 0);
2462 icode
= interclass_mathfn_icode (arg
, fndecl
);
2463 mode
= TYPE_MODE (TREE_TYPE (arg
));
2465 if (icode
!= CODE_FOR_nothing
)
2467 struct expand_operand ops
[1];
2468 rtx_insn
*last
= get_last_insn ();
2469 tree orig_arg
= arg
;
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more the once. */
2474 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2476 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2478 if (mode
!= GET_MODE (op0
))
2479 op0
= convert_to_mode (mode
, op0
, 0);
2481 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2482 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2483 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2484 return ops
[0].value
;
2486 delete_insns_since (last
);
2487 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2499 expand_builtin_sincos (tree exp
)
2501 rtx op0
, op1
, op2
, target1
, target2
;
2503 tree arg
, sinp
, cosp
;
2505 location_t loc
= EXPR_LOCATION (exp
);
2506 tree alias_type
, alias_off
;
2508 if (!validate_arglist (exp
, REAL_TYPE
,
2509 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2512 arg
= CALL_EXPR_ARG (exp
, 0);
2513 sinp
= CALL_EXPR_ARG (exp
, 1);
2514 cosp
= CALL_EXPR_ARG (exp
, 2);
2516 /* Make a suitable register to place result in. */
2517 mode
= TYPE_MODE (TREE_TYPE (arg
));
2519 /* Check if sincos insn is available, otherwise emit the call. */
2520 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2523 target1
= gen_reg_rtx (mode
);
2524 target2
= gen_reg_rtx (mode
);
2526 op0
= expand_normal (arg
);
2527 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2528 alias_off
= build_int_cst (alias_type
, 0);
2529 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2531 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2534 /* Compute into target1 and target2.
2535 Set TARGET to wherever the result comes back. */
2536 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2537 gcc_assert (result
);
2539 /* Move target1 and target2 to the memory locations indicated
2541 emit_move_insn (op1
, target1
);
2542 emit_move_insn (op2
, target2
);
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
2552 expand_builtin_cexpi (tree exp
, rtx target
)
2554 tree fndecl
= get_callee_fndecl (exp
);
2558 location_t loc
= EXPR_LOCATION (exp
);
2560 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2563 arg
= CALL_EXPR_ARG (exp
, 0);
2564 type
= TREE_TYPE (arg
);
2565 mode
= TYPE_MODE (TREE_TYPE (arg
));
2567 /* Try expanding via a sincos optab, fall back to emitting a libcall
2568 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2569 is only generated from sincos, cexp or if we have either of them. */
2570 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2572 op1
= gen_reg_rtx (mode
);
2573 op2
= gen_reg_rtx (mode
);
2575 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2580 else if (targetm
.libc_has_function (function_sincos
))
2582 tree call
, fn
= NULL_TREE
;
2586 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2587 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2588 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2589 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2590 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2591 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2595 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2596 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2597 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2598 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2599 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2600 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2602 /* Make sure not to fold the sincos call again. */
2603 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2605 call
, 3, arg
, top1
, top2
));
2609 tree call
, fn
= NULL_TREE
, narg
;
2610 tree ctype
= build_complex_type (type
);
2612 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2613 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2614 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2615 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2616 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2617 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2621 /* If we don't have a decl for cexp create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi
2623 without full target C99 function support. */
2624 if (fn
== NULL_TREE
)
2627 const char *name
= NULL
;
2629 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2631 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2633 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2636 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2637 fn
= build_fn_decl (name
, fntype
);
2640 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2641 build_real (type
, dconst0
), arg
);
2643 /* Make sure not to fold the cexp call again. */
2644 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2645 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2646 target
, VOIDmode
, EXPAND_NORMAL
);
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2651 make_tree (TREE_TYPE (arg
), op2
),
2652 make_tree (TREE_TYPE (arg
), op1
)),
2653 target
, VOIDmode
, EXPAND_NORMAL
);
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_exr
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2662 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2665 tree fntype
= TREE_TYPE (fndecl
);
2666 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2669 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2671 SET_EXPR_LOCATION (fn
, loc
);
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via optab fails, lower expression to (int)(floor(x)).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
2683 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2685 convert_optab builtin_optab
;
2688 tree fndecl
= get_callee_fndecl (exp
);
2689 enum built_in_function fallback_fn
;
2690 tree fallback_fndecl
;
2694 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2697 arg
= CALL_EXPR_ARG (exp
, 0);
2699 switch (DECL_FUNCTION_CODE (fndecl
))
2701 CASE_FLT_FN (BUILT_IN_ICEIL
):
2702 CASE_FLT_FN (BUILT_IN_LCEIL
):
2703 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2704 builtin_optab
= lceil_optab
;
2705 fallback_fn
= BUILT_IN_CEIL
;
2708 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2709 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2710 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2711 builtin_optab
= lfloor_optab
;
2712 fallback_fn
= BUILT_IN_FLOOR
;
2719 /* Make a suitable register to place result in. */
2720 mode
= TYPE_MODE (TREE_TYPE (exp
));
2722 target
= gen_reg_rtx (mode
);
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more the once. */
2727 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2729 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2733 /* Compute into TARGET. */
2734 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2736 /* Output the entire sequence. */
2737 insns
= get_insns ();
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns). */
2747 /* Fall back to floating point rounding optab. */
2748 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2750 /* For non-C99 targets we may end up without a fallback fndecl here
2751 if the user called __builtin_lfloor directly. In this case emit
2752 a call to the floor/ceil variants nevertheless. This should result
2753 in the best user experience for not full C99 targets. */
2754 if (fallback_fndecl
== NULL_TREE
)
2757 const char *name
= NULL
;
2759 switch (DECL_FUNCTION_CODE (fndecl
))
2761 case BUILT_IN_ICEIL
:
2762 case BUILT_IN_LCEIL
:
2763 case BUILT_IN_LLCEIL
:
2766 case BUILT_IN_ICEILF
:
2767 case BUILT_IN_LCEILF
:
2768 case BUILT_IN_LLCEILF
:
2771 case BUILT_IN_ICEILL
:
2772 case BUILT_IN_LCEILL
:
2773 case BUILT_IN_LLCEILL
:
2776 case BUILT_IN_IFLOOR
:
2777 case BUILT_IN_LFLOOR
:
2778 case BUILT_IN_LLFLOOR
:
2781 case BUILT_IN_IFLOORF
:
2782 case BUILT_IN_LFLOORF
:
2783 case BUILT_IN_LLFLOORF
:
2786 case BUILT_IN_IFLOORL
:
2787 case BUILT_IN_LFLOORL
:
2788 case BUILT_IN_LLFLOORL
:
2795 fntype
= build_function_type_list (TREE_TYPE (arg
),
2796 TREE_TYPE (arg
), NULL_TREE
);
2797 fallback_fndecl
= build_fn_decl (name
, fntype
);
2800 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2802 tmp
= expand_normal (exp
);
2803 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2805 /* Truncate the result of floating point optab to integer
2806 via expand_fix (). */
2807 target
= gen_reg_rtx (mode
);
2808 expand_fix (target
, tmp
, 0);
2813 /* Expand a call to one of the builtin math functions doing integer
2815 Return 0 if a normal call should be emitted rather than expanding the
2816 function in-line. EXP is the expression that is a call to the builtin
2817 function; if convenient, the result should be placed in TARGET. */
2820 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2822 convert_optab builtin_optab
;
2825 tree fndecl
= get_callee_fndecl (exp
);
2828 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2830 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2833 arg
= CALL_EXPR_ARG (exp
, 0);
2835 switch (DECL_FUNCTION_CODE (fndecl
))
2837 CASE_FLT_FN (BUILT_IN_IRINT
):
2838 fallback_fn
= BUILT_IN_LRINT
;
2840 CASE_FLT_FN (BUILT_IN_LRINT
):
2841 CASE_FLT_FN (BUILT_IN_LLRINT
):
2842 builtin_optab
= lrint_optab
;
2845 CASE_FLT_FN (BUILT_IN_IROUND
):
2846 fallback_fn
= BUILT_IN_LROUND
;
2848 CASE_FLT_FN (BUILT_IN_LROUND
):
2849 CASE_FLT_FN (BUILT_IN_LLROUND
):
2850 builtin_optab
= lround_optab
;
2857 /* There's no easy way to detect the case we need to set EDOM. */
2858 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2861 /* Make a suitable register to place result in. */
2862 mode
= TYPE_MODE (TREE_TYPE (exp
));
2864 /* There's no easy way to detect the case we need to set EDOM. */
2865 if (!flag_errno_math
)
2867 rtx result
= gen_reg_rtx (mode
);
2869 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 need to expand the argument again. This way, we will not perform
2871 side-effects more the once. */
2872 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2874 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2878 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2880 /* Output the entire sequence. */
2881 insns
= get_insns ();
2887 /* If we were unable to expand via the builtin, stop the sequence
2888 (without outputting the insns) and call to the library function
2889 with the stabilized argument list. */
2893 if (fallback_fn
!= BUILT_IN_NONE
)
2895 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2896 targets, (int) round (x) should never be transformed into
2897 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 a call to lround in the hope that the target provides at least some
2899 C99 functions. This should result in the best user experience for
2900 not full C99 targets. */
2901 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2904 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2905 fallback_fndecl
, 1, arg
);
2907 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2908 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2909 return convert_to_mode (mode
, target
, 0);
2912 return expand_call (exp
, target
, target
== const0_rtx
);
2915 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2916 a normal call should be emitted rather than expanding the function
2917 in-line. EXP is the expression that is a call to the builtin
2918 function; if convenient, the result should be placed in TARGET. */
2921 expand_builtin_powi (tree exp
, rtx target
)
2928 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2931 arg0
= CALL_EXPR_ARG (exp
, 0);
2932 arg1
= CALL_EXPR_ARG (exp
, 1);
2933 mode
= TYPE_MODE (TREE_TYPE (exp
));
2935 /* Emit a libcall to libgcc. */
2937 /* Mode of the 2nd argument must match that of an int. */
2938 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2940 if (target
== NULL_RTX
)
2941 target
= gen_reg_rtx (mode
);
2943 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2944 if (GET_MODE (op0
) != mode
)
2945 op0
= convert_to_mode (mode
, op0
, 0);
2946 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2947 if (GET_MODE (op1
) != mode2
)
2948 op1
= convert_to_mode (mode2
, op1
, 0);
2950 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2951 target
, LCT_CONST
, mode
, 2,
2952 op0
, mode
, op1
, mode2
);
2957 /* Expand expression EXP which is a call to the strlen builtin. Return
2958 NULL_RTX if we failed the caller should emit a normal call, otherwise
2959 try to get the result in TARGET, if convenient. */
2962 expand_builtin_strlen (tree exp
, rtx target
,
2963 machine_mode target_mode
)
2965 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2969 struct expand_operand ops
[4];
2972 tree src
= CALL_EXPR_ARG (exp
, 0);
2974 rtx_insn
*before_strlen
;
2975 machine_mode insn_mode
= target_mode
;
2976 enum insn_code icode
= CODE_FOR_nothing
;
2979 /* If the length can be computed at compile-time, return it. */
2980 len
= c_strlen (src
, 0);
2982 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2984 /* If the length can be computed at compile-time and is constant
2985 integer, but there are side-effects in src, evaluate
2986 src for side-effects, then return len.
2987 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2988 can be optimized into: i++; x = 3; */
2989 len
= c_strlen (src
, 1);
2990 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2992 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2993 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2996 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2998 /* If SRC is not a pointer type, don't do this operation inline. */
3002 /* Bail out if we can't compute strlen in the right mode. */
3003 while (insn_mode
!= VOIDmode
)
3005 icode
= optab_handler (strlen_optab
, insn_mode
);
3006 if (icode
!= CODE_FOR_nothing
)
3009 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3011 if (insn_mode
== VOIDmode
)
3014 /* Make a place to hold the source address. We will not expand
3015 the actual source until we are sure that the expansion will
3016 not fail -- there are trees that cannot be expanded twice. */
3017 src_reg
= gen_reg_rtx (Pmode
);
3019 /* Mark the beginning of the strlen sequence so we can emit the
3020 source operand later. */
3021 before_strlen
= get_last_insn ();
3023 create_output_operand (&ops
[0], target
, insn_mode
);
3024 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3025 create_integer_operand (&ops
[2], 0);
3026 create_integer_operand (&ops
[3], align
);
3027 if (!maybe_expand_insn (icode
, 4, ops
))
3030 /* Now that we are assured of success, expand the source. */
3032 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3035 #ifdef POINTERS_EXTEND_UNSIGNED
3036 if (GET_MODE (pat
) != Pmode
)
3037 pat
= convert_to_mode (Pmode
, pat
,
3038 POINTERS_EXTEND_UNSIGNED
);
3040 emit_move_insn (src_reg
, pat
);
3046 emit_insn_after (pat
, before_strlen
);
3048 emit_insn_before (pat
, get_insns ());
3050 /* Return the value in the proper mode for this function. */
3051 if (GET_MODE (ops
[0].value
) == target_mode
)
3052 target
= ops
[0].value
;
3053 else if (target
!= 0)
3054 convert_move (target
, ops
[0].value
, 0);
3056 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3062 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3063 bytes from constant string DATA + OFFSET and return it as target
3067 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3070 const char *str
= (const char *) data
;
3072 gcc_assert (offset
>= 0
3073 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3074 <= strlen (str
) + 1));
3076 return c_readstr (str
+ offset
, mode
);
/* NOTE(review): this whole region is a corrupted extraction of GCC's
   builtins.c — the original file's line numbers are fused into the text,
   statements are split mid-token across lines, and several lines (the
   function's return type, braces, some branches) are missing.  Restore the
   canonical text from the GCC sources before attempting to compile.
   Purpose (from surviving text): given LEN (a tree) and LEN_RTX (its RTL),
   compute lower/upper bounds on a memcpy/memset block size into *MIN_SIZE,
   *MAX_SIZE and a likely upper bound into *PROBABLE_MAX_SIZE, using the
   constant value if LEN_RTX is a CONST_INT, otherwise type bounds and
   SSA_NAME range info (VR_RANGE / VR_ANTI_RANGE).  */
3079 /* LEN specify length of the block of memcpy/memset operation.
3080 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3081 In some cases we can make very likely guess on max size, then we
3082 set it into PROBABLE_MAX_SIZE. */
3085 determine_block_size (tree len
, rtx len_rtx
,
3086 unsigned HOST_WIDE_INT
*min_size
,
3087 unsigned HOST_WIDE_INT
*max_size
,
3088 unsigned HOST_WIDE_INT
*probable_max_size
)
/* Fast path: a compile-time-constant length pins all three outputs.  */
3090 if (CONST_INT_P (len_rtx
))
3092 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3098 enum value_range_type range_type
= VR_UNDEFINED
;
3100 /* Determine bounds from the type. */
3101 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3102 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3105 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3106 *probable_max_size
= *max_size
3107 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
/* NOTE(review): presumably the fallback when the type gives no usable max —
   bound by the full mode mask of LEN_RTX's mode.  Confirm against the
   canonical source (an `else` line appears to have been dropped here).  */
3109 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
/* Tighten the bounds with SSA range information when LEN is an SSA_NAME.  */
3111 if (TREE_CODE (len
) == SSA_NAME
)
3112 range_type
= get_range_info (len
, &min
, &max
);
3113 if (range_type
== VR_RANGE
)
3115 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3116 *min_size
= min
.to_uhwi ();
3117 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3118 *probable_max_size
= *max_size
= max
.to_uhwi ();
3120 else if (range_type
== VR_ANTI_RANGE
)
3122 /* Anti range 0...N lets us to determine minimal size to N+1. */
3125 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3126 *min_size
= max
.to_uhwi () + 1;
3134 Produce anti range allowing negative values of N. We still
3135 can use the information and make a guess that N is not negative.
3137 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3138 *probable_max_size
= min
.to_uhwi () - 1;
/* Sanity check: the computed maximum must fit the RTL mode of LEN_RTX.  */
3141 gcc_checking_assert (*max_size
<=
3142 (unsigned HOST_WIDE_INT
)
3143 GET_MODE_MASK (GET_MODE (len_rtx
)));
/* NOTE(review): corrupted extraction — stale builtins.c line numbers are
   fused into the text and several lines are missing (return type, braces,
   the `return NULL_RTX;` bodies of the early-out conditions at 3162/3165,
   and the head of the `if (src_str ...)` condition at 3186).  Recover the
   canonical text from the GCC sources before compiling.
   Purpose (from surviving text): expand memcpy(DEST, SRC, LEN) inline —
   bail out to a library call when alignments are unknown; if SRC is a
   string constant storable by pieces, emit the constants directly;
   otherwise emit a block move via emit_block_move_hints, honoring profile
   hints for expected alignment/size.  Returns an rtx (the destination
   address in ptr_mode) or, presumably, NULL_RTX on the missing early-out
   paths — confirm against the canonical source.  */
3146 /* Helper function to do the actual work for expand_builtin_memcpy. */
3149 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3151 const char *src_str
;
3152 unsigned int src_align
= get_pointer_alignment (src
);
3153 unsigned int dest_align
= get_pointer_alignment (dest
);
3154 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3155 HOST_WIDE_INT expected_size
= -1;
3156 unsigned int expected_align
= 0;
3157 unsigned HOST_WIDE_INT min_size
;
3158 unsigned HOST_WIDE_INT max_size
;
3159 unsigned HOST_WIDE_INT probable_max_size
;
3161 /* If DEST is not a pointer type, call the normal function. */
3162 if (dest_align
== 0)
3165 /* If either SRC is not a pointer type, don't do this
3166 operation in-line. */
/* Pull expected alignment/size hints from value profiling, if any.  */
3170 if (currently_expanding_gimple_stmt
)
3171 stringop_block_profile (currently_expanding_gimple_stmt
,
3172 &expected_align
, &expected_size
);
3174 if (expected_align
< dest_align
)
3175 expected_align
= dest_align
;
3176 dest_mem
= get_memory_rtx (dest
, len
);
3177 set_mem_align (dest_mem
, dest_align
);
3178 len_rtx
= expand_normal (len
);
3179 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3180 &probable_max_size
);
3181 src_str
= c_getstr (src
);
3183 /* If SRC is a string constant and block move would be done
3184 by pieces, we can avoid loading the string from memory
3185 and only stored the computed constants. */
3187 && CONST_INT_P (len_rtx
)
3188 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3189 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3190 CONST_CAST (char *, src_str
),
3193 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3194 builtin_memcpy_read_str
,
3195 CONST_CAST (char *, src_str
),
3196 dest_align
, false, 0);
3197 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3198 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* General case: real block move.  */
3202 src_mem
= get_memory_rtx (src
, len
);
3203 set_mem_align (src_mem
, src_align
);
3205 /* Copy word part most expediently. */
3206 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3207 CALL_EXPR_TAILCALL (exp
)
3208 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3209 expected_align
, expected_size
,
3210 min_size
, max_size
, probable_max_size
);
3214 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3215 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
/* NOTE(review): corrupted extraction — stale line numbers fused into the
   text; the return type, braces, and the `return NULL_RTX;` for the failed
   validate_arglist path are missing.  Restore from the canonical GCC
   sources.  Purpose: thin wrapper that validates the (ptr, ptr, int)
   argument list of a memcpy CALL_EXPR and forwards the three arguments to
   expand_builtin_memcpy_args.  */
3221 /* Expand a call EXP to the memcpy builtin.
3222 Return NULL_RTX if we failed, the caller should emit a normal call,
3223 otherwise try to get the result in TARGET, if convenient (and in
3224 mode MODE if that's convenient). */
3227 expand_builtin_memcpy (tree exp
, rtx target
)
3229 if (!validate_arglist (exp
,
3230 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3234 tree dest
= CALL_EXPR_ARG (exp
, 0);
3235 tree src
= CALL_EXPR_ARG (exp
, 1);
3236 tree len
= CALL_EXPR_ARG (exp
, 2);
3237 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
/* NOTE(review): corrupted extraction — stale line numbers, missing return
   type/braces and the guard around the bounds-join (the `if (res)` line at
   the original 3262/3263 appears to have been dropped).  Restore from the
   canonical GCC sources.  Purpose: Pointer-Bounds-Checker (MPX/chkp)
   variant of memcpy expansion; arguments 0/2/4 are dest/src/len, the odd
   slots carry bounds.  On success the src bounds (arg 1) are joined into
   the returned slot via chkp_join_splitted_slot.  */
3241 /* Expand an instrumented call EXP to the memcpy builtin.
3242 Return NULL_RTX if we failed, the caller should emit a normal call,
3243 otherwise try to get the result in TARGET, if convenient (and in
3244 mode MODE if that's convenient). */
3247 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3249 if (!validate_arglist (exp
,
3250 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3251 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3252 INTEGER_TYPE
, VOID_TYPE
))
3256 tree dest
= CALL_EXPR_ARG (exp
, 0);
3257 tree src
= CALL_EXPR_ARG (exp
, 2);
3258 tree len
= CALL_EXPR_ARG (exp
, 4);
3259 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3261 /* Return src bounds with the result. */
3264 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3265 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3266 res
= chkp_join_splitted_slot (res
, bnd
);
/* NOTE(review): corrupted extraction — stale line numbers; missing return
   type, braces, the failed-validation `return NULL_RTX;`, and the trailing
   argument of the expand_builtin_mempcpy_args call (original line 3293,
   presumably the CALL_EXPR itself).  Restore from the canonical GCC
   sources.  Purpose: validate a mempcpy CALL_EXPR and forward to the
   shared mempcpy helper with endp == 1 (return the end pointer).  */
3272 /* Expand a call EXP to the mempcpy builtin.
3273 Return NULL_RTX if we failed; the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). If ENDP is 0 return the
3276 destination pointer, if ENDP is 1 return the end pointer ala
3277 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3281 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3283 if (!validate_arglist (exp
,
3284 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3288 tree dest
= CALL_EXPR_ARG (exp
, 0);
3289 tree src
= CALL_EXPR_ARG (exp
, 1);
3290 tree len
= CALL_EXPR_ARG (exp
, 2);
3291 return expand_builtin_mempcpy_args (dest
, src
, len
,
3292 target
, mode
, /*endp=*/ 1,
/* NOTE(review): corrupted extraction — stale line numbers; missing return
   type/braces, the endp argument line of the helper call (original 3316),
   and the `if (res)` guard before the bounds join.  Restore from the
   canonical GCC sources.  Purpose: chkp-instrumented mempcpy; args 0/2/4
   are dest/src/len, bounds ride in the odd slots and the src bounds
   (arg 1) are joined into the returned slot on success.  */
3297 /* Expand an instrumented call EXP to the mempcpy builtin.
3298 Return NULL_RTX if we failed, the caller should emit a normal call,
3299 otherwise try to get the result in TARGET, if convenient (and in
3300 mode MODE if that's convenient). */
3303 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3305 if (!validate_arglist (exp
,
3306 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3307 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3308 INTEGER_TYPE
, VOID_TYPE
))
3312 tree dest
= CALL_EXPR_ARG (exp
, 0);
3313 tree src
= CALL_EXPR_ARG (exp
, 2);
3314 tree len
= CALL_EXPR_ARG (exp
, 4);
3315 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3318 /* Return src bounds with the result. */
3321 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3322 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3323 res
= chkp_join_splitted_slot (res
, bnd
);
/* NOTE(review): corrupted extraction — stale line numbers fused into the
   text; missing pieces include the return type, braces, the final ORIG_EXP
   parameter line of the signature, the argument lists of the two
   build_call_nofold_loc calls (original 3349/3357), the early-out returns,
   and the head of the `if (src_str ...)` condition (original 3382).
   Restore from the canonical GCC sources.
   Purpose (from surviving text): shared worker for mempcpy/stpcpy
   expansion.  If the result is ignored (TARGET == const0_rtx) it degrades
   to a plain memcpy call; otherwise, for a constant LEN, it stores a
   constant source string by pieces or moves by pieces, returning the
   endp-adjusted destination address in ptr_mode.  */
3329 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3330 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3331 so that this can also be called without constructing an actual CALL_EXPR.
3332 The other arguments and return value are the same as for
3333 expand_builtin_mempcpy. */
3336 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3337 rtx target
, machine_mode mode
, int endp
,
3340 tree fndecl
= get_callee_fndecl (orig_exp
);
3342 /* If return value is ignored, transform mempcpy into memcpy. */
3343 if (target
== const0_rtx
3344 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3345 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3347 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3348 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3350 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3352 else if (target
== const0_rtx
3353 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3355 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3356 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3358 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3362 const char *src_str
;
3363 unsigned int src_align
= get_pointer_alignment (src
);
3364 unsigned int dest_align
= get_pointer_alignment (dest
);
3365 rtx dest_mem
, src_mem
, len_rtx
;
3367 /* If either SRC or DEST is not a pointer type, don't do this
3368 operation in-line. */
3369 if (dest_align
== 0 || src_align
== 0)
3372 /* If LEN is not constant, call the normal function. */
3373 if (! tree_fits_uhwi_p (len
))
3376 len_rtx
= expand_normal (len
);
3377 src_str
= c_getstr (src
);
3379 /* If SRC is a string constant and block move would be done
3380 by pieces, we can avoid loading the string from memory
3381 and only stored the computed constants. */
3383 && CONST_INT_P (len_rtx
)
3384 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3385 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3386 CONST_CAST (char *, src_str
),
3389 dest_mem
= get_memory_rtx (dest
, len
);
3390 set_mem_align (dest_mem
, dest_align
);
3391 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3392 builtin_memcpy_read_str
,
3393 CONST_CAST (char *, src_str
),
3394 dest_align
, false, endp
);
3395 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3396 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Constant length, no constant source string: try move-by-pieces.  */
3400 if (CONST_INT_P (len_rtx
)
3401 && can_move_by_pieces (INTVAL (len_rtx
),
3402 MIN (dest_align
, src_align
)))
3404 dest_mem
= get_memory_rtx (dest
, len
);
3405 set_mem_align (dest_mem
, dest_align
);
3406 src_mem
= get_memory_rtx (src
, len
);
3407 set_mem_align (src_mem
, src_align
);
3408 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3409 MIN (dest_align
, src_align
), endp
);
3410 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3411 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* NOTE(review): corrupted extraction — stale line numbers; missing return
   type, braces, local declarations for dest_mem/src_mem (original ~3430),
   the `return NULL_RTX;` failure paths, the `if (endp)` guard before the
   target setup (original ~3438-3439), and the final returns.  Restore from
   the canonical GCC sources.
   Purpose (from surviving text): expand strcpy/stpcpy via the target's
   movstr insn if available.  ENDP selects the returned pointer flavor:
   0 = dest, 1 = end (mempcpy-style), 2 = end minus one (stpcpy-style);
   movstr itself yields the address of the NUL, so endp==1 adds 1.  */
3419 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3420 we failed, the caller should emit a normal call, otherwise try to
3421 get the result in TARGET, if convenient. If ENDP is 0 return the
3422 destination pointer, if ENDP is 1 return the end pointer ala
3423 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3427 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3429 struct expand_operand ops
[3];
3433 if (!targetm
.have_movstr ())
3436 dest_mem
= get_memory_rtx (dest
, NULL
);
3437 src_mem
= get_memory_rtx (src
, NULL
);
3440 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3441 dest_mem
= replace_equiv_address (dest_mem
, target
);
3444 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3445 create_fixed_operand (&ops
[1], dest_mem
);
3446 create_fixed_operand (&ops
[2], src_mem
);
3447 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3450 if (endp
&& target
!= const0_rtx
)
3452 target
= ops
[0].value
;
3453 /* movstr is supposed to set end to the address of the NUL
3454 terminator. If the caller requested a mempcpy-like return value,
3458 rtx tem
= plus_constant (GET_MODE (target
),
3459 gen_lowpart (GET_MODE (target
), target
), 1);
3460 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* NOTE(review): corrupted extraction — stale line numbers; missing return
   type, braces, and the trailing `return NULL_RTX;` for the failed
   validation path.  Restore from the canonical GCC sources.  Purpose:
   validate a strcpy CALL_EXPR (ptr, ptr) and forward dest/src to
   expand_builtin_strcpy_args.  */
3466 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3467 NULL_RTX if we failed the caller should emit a normal call, otherwise
3468 try to get the result in TARGET, if convenient (and in mode MODE if that's
3472 expand_builtin_strcpy (tree exp
, rtx target
)
3474 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3476 tree dest
= CALL_EXPR_ARG (exp
, 0);
3477 tree src
= CALL_EXPR_ARG (exp
, 1);
3478 return expand_builtin_strcpy_args (dest
, src
, target
);
/* NOTE(review): corrupted extraction — stale line numbers; the return type
   and braces are missing.  Restore from the canonical GCC sources.
   Purpose: strcpy worker — delegate directly to the movstr expander with
   endp == 0 (return the destination pointer).  */
3483 /* Helper function to do the actual work for expand_builtin_strcpy. The
3484 arguments to the builtin_strcpy call DEST and SRC are broken out
3485 so that this can also be called without constructing an actual CALL_EXPR.
3486 The other arguments and return value are the same as for
3487 expand_builtin_strcpy. */
3490 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3492 return expand_movstr (dest
, src
, target
, /*endp=*/0);
/* NOTE(review): corrupted extraction — stale line numbers; missing pieces
   include the return type, braces, declarations of dst/src/len/lenp1/ret,
   the early `return NULL_RTX;`, the trailing argument of the
   expand_builtin_mempcpy_args call (original 3534), and several
   `if (ret)` / `return ret;` lines around the constant-length fixup.
   Restore from the canonical GCC sources.
   Purpose (from surviving text): expand stpcpy.  If the result is unused,
   degrade to strcpy; if the source is a literal with known length, reuse
   the mempcpy path with LEN+1 and endp == 2, patching the return value to
   dest + strlen when the by-pieces path bailed; otherwise fall back to
   movstr with endp == 2.  */
3495 /* Expand a call EXP to the stpcpy builtin.
3496 Return NULL_RTX if we failed the caller should emit a normal call,
3497 otherwise try to get the result in TARGET, if convenient (and in
3498 mode MODE if that's convenient). */
3501 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3504 location_t loc
= EXPR_LOCATION (exp
);
3506 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3509 dst
= CALL_EXPR_ARG (exp
, 0);
3510 src
= CALL_EXPR_ARG (exp
, 1);
3512 /* If return value is ignored, transform stpcpy into strcpy. */
3513 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3515 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3516 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3517 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3524 /* Ensure we get an actual string whose length can be evaluated at
3525 compile-time, not an expression containing a string. This is
3526 because the latter will potentially produce pessimized code
3527 when used to produce the return value. */
3528 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3529 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3531 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3532 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3533 target
, mode
, /*endp=*/2,
/* If mempcpy expansion failed but LEN is constant, fall back to strcpy and
   compute the stpcpy result as dest + LEN by hand.  */
3539 if (TREE_CODE (len
) == INTEGER_CST
)
3541 rtx len_rtx
= expand_normal (len
);
3543 if (CONST_INT_P (len_rtx
))
3545 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3551 if (mode
!= VOIDmode
)
3552 target
= gen_reg_rtx (mode
);
3554 target
= gen_reg_rtx (GET_MODE (ret
));
3556 if (GET_MODE (target
) != GET_MODE (ret
))
3557 ret
= gen_lowpart (GET_MODE (target
), ret
);
3559 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3560 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3568 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* NOTE(review): corrupted extraction — stale line numbers; missing the
   return type, the third parameter of the signature (the machine_mode),
   braces, and the `return const0_rtx;` for the out-of-range case.
   Restore from the canonical GCC sources.
   Purpose: store_by_pieces callback for strncpy — read a MODE-sized chunk
   of the constant string DATA at OFFSET; past the terminating NUL it
   yields the zero padding strncpy requires.  */
3572 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3573 bytes from constant string DATA + OFFSET and return it as target
3577 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3580 const char *str
= (const char *) data
;
3582 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3585 return c_readstr (str
+ offset
, mode
);
/* NOTE(review): corrupted extraction — stale line numbers; missing the
   return type, braces, several `return NULL_RTX;` lines, the trailing
   arguments of the can_store_by_pieces call (original 3623), the dest_mem
   declaration, and the final return of dest_mem.  Restore from the
   canonical GCC sources.
   Purpose (from surviving text): expand strncpy(DEST, SRC, LEN) inline
   only when LEN and strlen(SRC) are compile-time constants; when LEN >
   strlen(SRC)+1 the zero padding is produced via store_by_pieces with
   builtin_strncpy_read_str, otherwise punt to a library call.  */
3588 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3589 NULL_RTX if we failed the caller should emit a normal call. */
3592 expand_builtin_strncpy (tree exp
, rtx target
)
3594 location_t loc
= EXPR_LOCATION (exp
);
3596 if (validate_arglist (exp
,
3597 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3599 tree dest
= CALL_EXPR_ARG (exp
, 0);
3600 tree src
= CALL_EXPR_ARG (exp
, 1);
3601 tree len
= CALL_EXPR_ARG (exp
, 2);
3602 tree slen
= c_strlen (src
, 1);
3604 /* We must be passed a constant len and src parameter. */
3605 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3608 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3610 /* We're required to pad with trailing zeros if the requested
3611 len is greater than strlen(s2)+1. In that case try to
3612 use store_by_pieces, if it fails, punt. */
3613 if (tree_int_cst_lt (slen
, len
))
3615 unsigned int dest_align
= get_pointer_alignment (dest
);
3616 const char *p
= c_getstr (src
);
3619 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3620 || !can_store_by_pieces (tree_to_uhwi (len
),
3621 builtin_strncpy_read_str
,
3622 CONST_CAST (char *, p
),
3626 dest_mem
= get_memory_rtx (dest
, len
);
3627 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3628 builtin_strncpy_read_str
,
3629 CONST_CAST (char *, p
), dest_align
, false, 0);
3630 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3631 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, the machine_mode parameter line, and braces are missing.  Restore
   from the canonical GCC sources.
   Purpose: store_by_pieces callback for memset with a constant fill byte —
   build a MODE-sized buffer filled with the byte *DATA and read it back as
   an rtx constant (OFFSET is irrelevant, every chunk is identical).  */
3638 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3639 bytes from constant string DATA + OFFSET and return it as target
3643 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3646 const char *c
= (const char *) data
;
3647 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3649 memset (p
, *c
, GET_MODE_SIZE (mode
));
3651 return c_readstr (p
, mode
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, the machine_mode parameter line, braces, and the declarations of
   size/p/coeff/target are missing.  Restore from the canonical GCC
   sources.
   Purpose: store_by_pieces callback for memset with a non-constant fill
   value — replicate the byte in DATA (an rtx) across a MODE-wide register
   by multiplying with the 0x0101...01 coefficient built from a
   ones-filled buffer.  */
3654 /* Callback routine for store_by_pieces. Return the RTL of a register
3655 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3656 char value given in the RTL register data. For example, if mode is
3657 4 bytes wide, return the RTL for 0x01010101*data. */
3660 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3667 size
= GET_MODE_SIZE (mode
);
3671 p
= XALLOCAVEC (char, size
);
3672 memset (p
, 1, size
);
3673 coeff
= c_readstr (p
, mode
);
3675 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3676 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3677 return force_reg (mode
, target
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, braces, and the failed-validation `return NULL_RTX;` are missing.
   Restore from the canonical GCC sources.  Purpose: validate a memset
   CALL_EXPR (ptr, int, int) and forward dest/val/len to
   expand_builtin_memset_args.  */
3680 /* Expand expression EXP, which is a call to the memset builtin. Return
3681 NULL_RTX if we failed the caller should emit a normal call, otherwise
3682 try to get the result in TARGET, if convenient (and in mode MODE if that's
3686 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3688 if (!validate_arglist (exp
,
3689 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3693 tree dest
= CALL_EXPR_ARG (exp
, 0);
3694 tree val
= CALL_EXPR_ARG (exp
, 1);
3695 tree len
= CALL_EXPR_ARG (exp
, 2);
3696 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, braces, the failed-validation return, the `if (res)` guard before
   the bounds join, and the final `return res;` are missing.  Restore from
   the canonical GCC sources.  Purpose: chkp-instrumented memset; args
   0/2/3 are dest/val/len (arg 1 carries the dest bounds), and those bounds
   are joined into the returned slot on success.  */
3700 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3701 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3702 try to get the result in TARGET, if convenient (and in mode MODE if that's
3706 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3708 if (!validate_arglist (exp
,
3709 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3710 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3714 tree dest
= CALL_EXPR_ARG (exp
, 0);
3715 tree val
= CALL_EXPR_ARG (exp
, 2);
3716 tree len
= CALL_EXPR_ARG (exp
, 3);
3717 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3719 /* Return src bounds with the result. */
3722 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3723 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3724 res
= chkp_join_splitted_slot (res
, bnd
);
/* NOTE(review): corrupted extraction — stale line numbers; missing pieces
   include the return type, braces, the declarations of c/val_rtx/fndecl/fn,
   several `return` and `goto do_libcall;` lines, the trailing arguments of
   store_by_pieces / set_storage_via_setmem / clear_storage_hints calls, the
   do_libcall label, and the argument lists of the build_call_nofold_loc
   calls.  Restore from the canonical GCC sources.
   Purpose (from surviving text): shared worker for memset/bzero expansion.
   Handles LEN == 0, stabilizes the arguments, then for a non-constant VAL
   tries store-by-pieces (builtin_memset_gen_str) or the target's setmem
   pattern; for a constant byte C tries store-by-pieces
   (builtin_memset_read_str), setmem, or clear_storage_hints when C == 0;
   on total failure it rebuilds and expands a library call to
   memset/bzero, preserving the tail-call flag.  */
3730 /* Helper function to do the actual work for expand_builtin_memset. The
3731 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3732 so that this can also be called without constructing an actual CALL_EXPR.
3733 The other arguments and return value are the same as for
3734 expand_builtin_memset. */
3737 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3738 rtx target
, machine_mode mode
, tree orig_exp
)
3741 enum built_in_function fcode
;
3742 machine_mode val_mode
;
3744 unsigned int dest_align
;
3745 rtx dest_mem
, dest_addr
, len_rtx
;
3746 HOST_WIDE_INT expected_size
= -1;
3747 unsigned int expected_align
= 0;
3748 unsigned HOST_WIDE_INT min_size
;
3749 unsigned HOST_WIDE_INT max_size
;
3750 unsigned HOST_WIDE_INT probable_max_size
;
3752 dest_align
= get_pointer_alignment (dest
);
3754 /* If DEST is not a pointer type, don't do this operation in-line. */
3755 if (dest_align
== 0)
3758 if (currently_expanding_gimple_stmt
)
3759 stringop_block_profile (currently_expanding_gimple_stmt
,
3760 &expected_align
, &expected_size
);
3762 if (expected_align
< dest_align
)
3763 expected_align
= dest_align
;
3765 /* If the LEN parameter is zero, return DEST. */
3766 if (integer_zerop (len
))
3768 /* Evaluate and ignore VAL in case it has side-effects. */
3769 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3770 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3773 /* Stabilize the arguments in case we fail. */
3774 dest
= builtin_save_expr (dest
);
3775 val
= builtin_save_expr (val
);
3776 len
= builtin_save_expr (len
);
3778 len_rtx
= expand_normal (len
);
3779 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3780 &probable_max_size
);
3781 dest_mem
= get_memory_rtx (dest
, len
);
3782 val_mode
= TYPE_MODE (unsigned_char_type_node
);
/* Non-constant fill value: replicate it in a register and store.  */
3784 if (TREE_CODE (val
) != INTEGER_CST
)
3788 val_rtx
= expand_normal (val
);
3789 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3791 /* Assume that we can memset by pieces if we can store
3792 * the coefficients by pieces (in the required modes).
3793 * We can't pass builtin_memset_gen_str as that emits RTL. */
3795 if (tree_fits_uhwi_p (len
)
3796 && can_store_by_pieces (tree_to_uhwi (len
),
3797 builtin_memset_read_str
, &c
, dest_align
,
3800 val_rtx
= force_reg (val_mode
, val_rtx
);
3801 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3802 builtin_memset_gen_str
, val_rtx
, dest_align
,
3805 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3806 dest_align
, expected_align
,
3807 expected_size
, min_size
, max_size
,
3811 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3812 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Constant fill value: reduce VAL to a target char C.  */
3816 if (target_char_cast (val
, &c
))
3821 if (tree_fits_uhwi_p (len
)
3822 && can_store_by_pieces (tree_to_uhwi (len
),
3823 builtin_memset_read_str
, &c
, dest_align
,
3825 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3826 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3827 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3828 gen_int_mode (c
, val_mode
),
3829 dest_align
, expected_align
,
3830 expected_size
, min_size
, max_size
,
3834 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3835 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* C == 0: clear the storage instead of storing a value.  */
3839 set_mem_align (dest_mem
, dest_align
);
3840 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3841 CALL_EXPR_TAILCALL (orig_exp
)
3842 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3843 expected_align
, expected_size
,
3849 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3850 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
/* Fallback: emit a real library call (memset or bzero), keeping the
   tail-call flag of the original expression.  */
3856 fndecl
= get_callee_fndecl (orig_exp
);
3857 fcode
= DECL_FUNCTION_CODE (fndecl
);
3858 if (fcode
== BUILT_IN_MEMSET
3859 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
3860 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3862 else if (fcode
== BUILT_IN_BZERO
)
3863 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3867 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3868 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3869 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, braces, the dest/size declarations, and the failed-validation
   `return NULL_RTX;` are missing.  Restore from the canonical GCC
   sources.  Purpose: expand bzero(ptr, n) by rewriting it as
   memset(ptr, 0, (size_t) n) and delegating to the memset worker; passing
   the original EXP lets the fallback path call bzero rather than memset.  */
3872 /* Expand expression EXP, which is a call to the bzero builtin. Return
3873 NULL_RTX if we failed the caller should emit a normal call. */
3876 expand_builtin_bzero (tree exp
)
3879 location_t loc
= EXPR_LOCATION (exp
);
3881 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3884 dest
= CALL_EXPR_ARG (exp
, 0);
3885 size
= CALL_EXPR_ARG (exp
, 1);
3887 /* New argument list transforming bzero(ptr x, int y) to
3888 memset(ptr x, int 0, size_t y). This is done this way
3889 so that if it isn't expanded inline, we fallback to
3890 calling bzero instead of memset. */
3892 return expand_builtin_memset_args (dest
, integer_zero_node
,
3893 fold_convert_loc (loc
,
3894 size_type_node
, size
),
3895 const0_rtx
, VOIDmode
, exp
);
/* NOTE(review): corrupted extraction — stale line numbers; the return
   type, braces, the `target = NULL_RTX;`-style reset when TARGET is
   unusable, and the final `return NULL_RTX;` are missing.  Restore from
   the canonical GCC sources.  Purpose: build the 4-operand operand array
   (output, two fixed memory operands, alignment) for a cmpstr insn ICODE
   and emit it via maybe_expand_insn, returning the result operand's value
   on success.  */
3898 /* Try to expand cmpstr operation ICODE with the given operands.
3899 Return the result rtx on success, otherwise return null. */
3902 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
3903 HOST_WIDE_INT align
)
3905 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
3907 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
3910 struct expand_operand ops
[4];
3911 create_output_operand (&ops
[0], target
, insn_mode
);
3912 create_fixed_operand (&ops
[1], arg1_rtx
);
3913 create_fixed_operand (&ops
[2], arg2_rtx
);
3914 create_integer_operand (&ops
[3], align
);
3915 if (maybe_expand_insn (icode
, 4, ops
))
3916 return ops
[0].value
;
/* NOTE(review): corrupted extraction — stale line numbers; same missing
   pieces as expand_cmpstr (return type, braces, target reset, final
   `return NULL_RTX;`).  Restore from the canonical GCC sources.
   Purpose: like expand_cmpstr but for the 5-operand cmpstrn/cmpmem
   patterns — the extra operand is the length ARG3_RTX, converted from
   ARG3_TYPE's mode with its signedness.  */
3920 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3921 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3922 otherwise return null. */
3925 expand_cmpstrn_or_cmpmem (insn_code icode
, rtx target
, rtx arg1_rtx
,
3926 rtx arg2_rtx
, tree arg3_type
, rtx arg3_rtx
,
3927 HOST_WIDE_INT align
)
3929 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
3931 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
3934 struct expand_operand ops
[5];
3935 create_output_operand (&ops
[0], target
, insn_mode
);
3936 create_fixed_operand (&ops
[1], arg1_rtx
);
3937 create_fixed_operand (&ops
[2], arg2_rtx
);
3938 create_convert_operand_from (&ops
[3], arg3_rtx
, TYPE_MODE (arg3_type
),
3939 TYPE_UNSIGNED (arg3_type
));
3940 create_integer_operand (&ops
[4], align
);
3941 if (maybe_expand_insn (icode
, 5, ops
))
3942 return ops
[0].value
;
/* NOTE(review): corrupted extraction — stale line numbers; missing the
   return type, braces, several `return NULL_RTX;` lines, the `if (result)`
   guard around the result-conversion block, and the condition head of the
   `result = gen_reg_rtx` fallback (original ~4005-4007).  Restore from the
   canonical GCC sources.
   Purpose (from surviving text): expand memcmp via the target's cmpmem
   pattern if present; on success coerce the comparison result to the
   call's result mode, otherwise emit a pure libcall to memcmp via
   emit_library_call_value.  */
3946 /* Expand expression EXP, which is a call to the memcmp built-in function.
3947 Return NULL_RTX if we failed and the caller should emit a normal call,
3948 otherwise try to get the result in TARGET, if convenient. */
3951 expand_builtin_memcmp (tree exp
, rtx target
)
3953 if (!validate_arglist (exp
,
3954 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3957 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3958 implementing memcmp because it will stop if it encounters two
3960 insn_code icode
= direct_optab_handler (cmpmem_optab
, SImode
);
3961 if (icode
== CODE_FOR_nothing
)
3964 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3965 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3966 tree len
= CALL_EXPR_ARG (exp
, 2);
3968 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3969 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3971 /* If we don't have POINTER_TYPE, call the function. */
3972 if (arg1_align
== 0 || arg2_align
== 0)
3975 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
3976 location_t loc
= EXPR_LOCATION (exp
);
3977 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
3978 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
3979 rtx arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3981 /* Set MEM_SIZE as appropriate. */
3982 if (CONST_INT_P (arg3_rtx
))
3984 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3985 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3988 rtx result
= expand_cmpstrn_or_cmpmem (icode
, target
, arg1_rtx
, arg2_rtx
,
3989 TREE_TYPE (len
), arg3_rtx
,
3990 MIN (arg1_align
, arg2_align
));
3993 /* Return the value in the proper mode for this function. */
3994 if (GET_MODE (result
) == mode
)
3999 convert_move (target
, result
, 0);
4003 return convert_to_mode (mode
, result
, 0);
/* Pattern expansion failed: fall back to an explicit libcall.  */
4008 && REG_P (result
) && GET_MODE (result
) == mode
4009 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4010 result
= gen_reg_rtx (mode
);
4012 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4013 TYPE_MODE (integer_type_node
), 3,
4014 XEXP (arg1_rtx
, 0), Pmode
,
4015 XEXP (arg2_rtx
, 0), Pmode
,
4016 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4017 TYPE_UNSIGNED (sizetype
)),
4018 TYPE_MODE (sizetype
));
/* NOTE(review): corrupted extraction — stale line numbers; missing the
   return type, braces, the `len = ...` assignments in the length-selection
   chain (originals 4083-4098 are mostly dropped), the `if (result)` guards,
   and the fndecl/fn declarations before the libcall fallback.  Restore
   from the canonical GCC sources.
   Purpose (from surviving text): expand strcmp via cmpstrsi if available,
   else via cmpstrnsi with a length derived from whichever argument has a
   known constant strlen (+1), picking the cheaper/shorter candidate and
   refusing arguments with side effects; on success convert the result to
   the call's mode, otherwise rebuild and expand a stabilized library
   call preserving the tail-call flag.  */
4022 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4023 if we failed the caller should emit a normal call, otherwise try to get
4024 the result in TARGET, if convenient. */
4027 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4029 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4032 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4033 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4034 if (cmpstr_icode
!= CODE_FOR_nothing
|| cmpstrn_icode
!= CODE_FOR_nothing
)
4036 rtx arg1_rtx
, arg2_rtx
;
4038 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4039 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4040 rtx result
= NULL_RTX
;
4042 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4043 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4045 /* If we don't have POINTER_TYPE, call the function. */
4046 if (arg1_align
== 0 || arg2_align
== 0)
4049 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4050 arg1
= builtin_save_expr (arg1
);
4051 arg2
= builtin_save_expr (arg2
);
4053 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4054 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4056 /* Try to call cmpstrsi. */
4057 if (cmpstr_icode
!= CODE_FOR_nothing
)
4058 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4059 MIN (arg1_align
, arg2_align
));
4061 /* Try to determine at least one length and call cmpstrnsi. */
4062 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4067 tree len1
= c_strlen (arg1
, 1);
4068 tree len2
= c_strlen (arg2
, 1);
4071 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4073 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4075 /* If we don't have a constant length for the first, use the length
4076 of the second, if we know it. We don't require a constant for
4077 this case; some cost analysis could be done if both are available
4078 but neither is constant. For now, assume they're equally cheap,
4079 unless one has side effects. If both strings have constant lengths,
4086 else if (TREE_SIDE_EFFECTS (len1
))
4088 else if (TREE_SIDE_EFFECTS (len2
))
4090 else if (TREE_CODE (len1
) != INTEGER_CST
)
4092 else if (TREE_CODE (len2
) != INTEGER_CST
)
4094 else if (tree_int_cst_lt (len1
, len2
))
4099 /* If both arguments have side effects, we cannot optimize. */
4100 if (len
&& !TREE_SIDE_EFFECTS (len
))
4102 arg3_rtx
= expand_normal (len
);
4103 result
= expand_cmpstrn_or_cmpmem
4104 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4105 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4111 /* Return the value in the proper mode for this function. */
4112 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4113 if (GET_MODE (result
) == mode
)
4116 return convert_to_mode (mode
, result
, 0);
4117 convert_move (target
, result
, 0);
4121 /* Expand the library call ourselves using a stabilized argument
4122 list to avoid re-evaluating the function's arguments twice. */
4123 fndecl
= get_callee_fndecl (exp
);
4124 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4125 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4126 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4127 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): corrupted extraction — stale line numbers; missing the
   return type, braces, the `len = ...` assignments in the length-selection
   chain (originals 4179-4194 mostly dropped), the result/fndecl/fn
   declarations, the `if (result)` guard, and the argument list of the
   fallback build_call_nofold_loc (original 4234).  Restore from the
   canonical GCC sources.
   Purpose (from surviving text): expand strncmp via cmpstrnsi using length
   MIN(strlen(argK)+1, arg3) when one strlen is a side-effect-free
   constant; stabilizes the arguments, converts a successful pattern
   result to the call's mode, and otherwise rebuilds and expands a
   stabilized 3-argument library call preserving the tail-call flag.  */
4132 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4133 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4134 the result in TARGET, if convenient. */
4137 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4138 ATTRIBUTE_UNUSED machine_mode mode
)
4140 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4142 if (!validate_arglist (exp
,
4143 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4146 /* If c_strlen can determine an expression for one of the string
4147 lengths, and it doesn't have side effects, then emit cmpstrnsi
4148 using length MIN(strlen(string)+1, arg3). */
4149 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4150 if (cmpstrn_icode
!= CODE_FOR_nothing
)
4152 tree len
, len1
, len2
;
4153 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4156 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4157 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4158 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4160 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4161 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4163 len1
= c_strlen (arg1
, 1);
4164 len2
= c_strlen (arg2
, 1);
4167 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4169 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4171 /* If we don't have a constant length for the first, use the length
4172 of the second, if we know it. We don't require a constant for
4173 this case; some cost analysis could be done if both are available
4174 but neither is constant. For now, assume they're equally cheap,
4175 unless one has side effects. If both strings have constant lengths,
4182 else if (TREE_SIDE_EFFECTS (len1
))
4184 else if (TREE_SIDE_EFFECTS (len2
))
4186 else if (TREE_CODE (len1
) != INTEGER_CST
)
4188 else if (TREE_CODE (len2
) != INTEGER_CST
)
4190 else if (tree_int_cst_lt (len1
, len2
))
4195 /* If both arguments have side effects, we cannot optimize. */
4196 if (!len
|| TREE_SIDE_EFFECTS (len
))
4199 /* The actual new length parameter is MIN(len,arg3). */
4200 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4201 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4203 /* If we don't have POINTER_TYPE, call the function. */
4204 if (arg1_align
== 0 || arg2_align
== 0)
4207 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4208 arg1
= builtin_save_expr (arg1
);
4209 arg2
= builtin_save_expr (arg2
);
4210 len
= builtin_save_expr (len
);
4212 arg1_rtx
= get_memory_rtx (arg1
, len
);
4213 arg2_rtx
= get_memory_rtx (arg2
, len
);
4214 arg3_rtx
= expand_normal (len
);
4215 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4216 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4217 MIN (arg1_align
, arg2_align
));
4220 /* Return the value in the proper mode for this function. */
4221 mode
= TYPE_MODE (TREE_TYPE (exp
));
4222 if (GET_MODE (result
) == mode
)
4225 return convert_to_mode (mode
, result
, 0);
4226 convert_move (target
, result
, 0);
4230 /* Expand the library call ourselves using a stabilized argument
4231 list to avoid re-evaluating the function's arguments twice. */
4232 fndecl
= get_callee_fndecl (exp
);
4233 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4235 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4236 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4237 return expand_call (fn
, target
, target
== const0_rtx
);
4242 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4243 if that's convenient. */
4246 expand_builtin_saveregs (void)
4251 /* Don't do __builtin_saveregs more than once in a function.
4252 Save the result of the first call and reuse it. */
4253 if (saveregs_value
!= 0)
4254 return saveregs_value
;
4256 /* When this function is called, it means that registers must be
4257 saved on entry to this function. So we migrate the call to the
4258 first insn of this function. */
4262 /* Do whatever the machine needs done in this case. */
4263 val
= targetm
.calls
.expand_builtin_saveregs ();
4268 saveregs_value
= val
;
4270 /* Put the insns after the NOTE that starts the function. If this
4271 is inside a start_sequence, make the outer-level insn chain current, so
4272 the code is placed at the start of the function. */
4273 push_topmost_sequence ();
4274 emit_insn_after (seq
, entry_of_function ());
4275 pop_topmost_sequence ();
4280 /* Expand a call to __builtin_next_arg. */
4283 expand_builtin_next_arg (void)
4285 /* Checking arguments is already done in fold_builtin_next_arg
4286 that must be called before this function. */
4287 return expand_binop (ptr_mode
, add_optab
,
4288 crtl
->args
.internal_arg_pointer
,
4289 crtl
->args
.arg_offset_rtx
,
4290 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4293 /* Make it easier for the backends by protecting the valist argument
4294 from multiple evaluations. */
4297 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4299 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4301 /* The current way of determining the type of valist is completely
4302 bogus. We should have the information on the va builtin instead. */
4304 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4306 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4308 if (TREE_SIDE_EFFECTS (valist
))
4309 valist
= save_expr (valist
);
4311 /* For this case, the backends will be expecting a pointer to
4312 vatype, but it's possible we've actually been given an array
4313 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4315 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4317 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4318 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4323 tree pt
= build_pointer_type (vatype
);
4327 if (! TREE_SIDE_EFFECTS (valist
))
4330 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4331 TREE_SIDE_EFFECTS (valist
) = 1;
4334 if (TREE_SIDE_EFFECTS (valist
))
4335 valist
= save_expr (valist
);
4336 valist
= fold_build2_loc (loc
, MEM_REF
,
4337 vatype
, valist
, build_int_cst (pt
, 0));
4343 /* The "standard" definition of va_list is void*. */
4346 std_build_builtin_va_list (void)
4348 return ptr_type_node
;
4351 /* The "standard" abi va_list is va_list_type_node. */
4354 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4356 return va_list_type_node
;
4359 /* The "standard" type of va_list is va_list_type_node. */
4362 std_canonical_va_list_type (tree type
)
4366 if (INDIRECT_REF_P (type
))
4367 type
= TREE_TYPE (type
);
4368 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4369 type
= TREE_TYPE (type
);
4370 wtype
= va_list_type_node
;
4372 /* Treat structure va_list types. */
4373 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4374 htype
= TREE_TYPE (htype
);
4375 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4377 /* If va_list is an array type, the argument may have decayed
4378 to a pointer type, e.g. by being passed to another function.
4379 In that case, unwrap both types so that we can compare the
4380 underlying records. */
4381 if (TREE_CODE (htype
) == ARRAY_TYPE
4382 || POINTER_TYPE_P (htype
))
4384 wtype
= TREE_TYPE (wtype
);
4385 htype
= TREE_TYPE (htype
);
4388 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4389 return va_list_type_node
;
4394 /* The "standard" implementation of va_start: just assign `nextarg' to
4398 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4400 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4401 convert_move (va_r
, nextarg
, 0);
4403 /* We do not have any valid bounds for the pointer, so
4404 just store zero bounds for it. */
4405 if (chkp_function_instrumented_p (current_function_decl
))
4406 chkp_expand_bounds_reset_for_mem (valist
,
4407 make_tree (TREE_TYPE (valist
),
4411 /* Expand EXP, a call to __builtin_va_start. */
4414 expand_builtin_va_start (tree exp
)
4418 location_t loc
= EXPR_LOCATION (exp
);
4420 if (call_expr_nargs (exp
) < 2)
4422 error_at (loc
, "too few arguments to function %<va_start%>");
4426 if (fold_builtin_next_arg (exp
, true))
4429 nextarg
= expand_builtin_next_arg ();
4430 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4432 if (targetm
.expand_builtin_va_start
)
4433 targetm
.expand_builtin_va_start (valist
, nextarg
);
4435 std_expand_builtin_va_start (valist
, nextarg
);
4440 /* Expand EXP, a call to __builtin_va_end. */
4443 expand_builtin_va_end (tree exp
)
4445 tree valist
= CALL_EXPR_ARG (exp
, 0);
4447 /* Evaluate for side effects, if needed. I hate macros that don't
4449 if (TREE_SIDE_EFFECTS (valist
))
4450 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4455 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4456 builtin rather than just as an assignment in stdarg.h because of the
4457 nastiness of array-type va_list types. */
4460 expand_builtin_va_copy (tree exp
)
4463 location_t loc
= EXPR_LOCATION (exp
);
4465 dst
= CALL_EXPR_ARG (exp
, 0);
4466 src
= CALL_EXPR_ARG (exp
, 1);
4468 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4469 src
= stabilize_va_list_loc (loc
, src
, 0);
4471 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4473 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4475 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4476 TREE_SIDE_EFFECTS (t
) = 1;
4477 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4481 rtx dstb
, srcb
, size
;
4483 /* Evaluate to pointers. */
4484 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4485 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4486 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4487 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4489 dstb
= convert_memory_address (Pmode
, dstb
);
4490 srcb
= convert_memory_address (Pmode
, srcb
);
4492 /* "Dereference" to BLKmode memories. */
4493 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4494 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4495 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4496 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4497 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4498 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4501 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4507 /* Expand a call to one of the builtin functions __builtin_frame_address or
4508 __builtin_return_address. */
4511 expand_builtin_frame_address (tree fndecl
, tree exp
)
4513 /* The argument must be a nonnegative integer constant.
4514 It counts the number of frames to scan up the stack.
4515 The value is either the frame pointer value or the return
4516 address saved in that frame. */
4517 if (call_expr_nargs (exp
) == 0)
4518 /* Warning about missing arg was already issued. */
4520 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4522 error ("invalid argument to %qD", fndecl
);
4527 /* Number of frames to scan up the stack. */
4528 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4530 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4532 /* Some ports cannot access arbitrary stack frames. */
4535 warning (0, "unsupported argument to %qD", fndecl
);
4541 /* Warn since no effort is made to ensure that any frame
4542 beyond the current one exists or can be safely reached. */
4543 warning (OPT_Wframe_address
, "calling %qD with "
4544 "a nonzero argument is unsafe", fndecl
);
4547 /* For __builtin_frame_address, return what we've got. */
4548 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4552 && ! CONSTANT_P (tem
))
4553 tem
= copy_addr_to_reg (tem
);
4558 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4559 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4560 is the same as for allocate_dynamic_stack_space. */
4563 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4569 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4570 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4573 = (alloca_with_align
4574 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4575 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4580 /* Compute the argument. */
4581 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4583 /* Compute the alignment. */
4584 align
= (alloca_with_align
4585 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4586 : BIGGEST_ALIGNMENT
);
4588 /* Allocate the desired space. */
4589 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4590 result
= convert_memory_address (ptr_mode
, result
);
4595 /* Expand a call to bswap builtin in EXP.
4596 Return NULL_RTX if a normal call should be emitted rather than expanding the
4597 function in-line. If convenient, the result should be placed in TARGET.
4598 SUBTARGET may be used as the target for computing one of EXP's operands. */
4601 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
4607 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4610 arg
= CALL_EXPR_ARG (exp
, 0);
4611 op0
= expand_expr (arg
,
4612 subtarget
&& GET_MODE (subtarget
) == target_mode
4613 ? subtarget
: NULL_RTX
,
4614 target_mode
, EXPAND_NORMAL
);
4615 if (GET_MODE (op0
) != target_mode
)
4616 op0
= convert_to_mode (target_mode
, op0
, 1);
4618 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4620 gcc_assert (target
);
4622 return convert_to_mode (target_mode
, target
, 1);
4625 /* Expand a call to a unary builtin in EXP.
4626 Return NULL_RTX if a normal call should be emitted rather than expanding the
4627 function in-line. If convenient, the result should be placed in TARGET.
4628 SUBTARGET may be used as the target for computing one of EXP's operands. */
4631 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
4632 rtx subtarget
, optab op_optab
)
4636 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4639 /* Compute the argument. */
4640 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4642 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4643 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4644 VOIDmode
, EXPAND_NORMAL
);
4645 /* Compute op, into TARGET if possible.
4646 Set TARGET to wherever the result comes back. */
4647 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4648 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4649 gcc_assert (target
);
4651 return convert_to_mode (target_mode
, target
, 0);
4654 /* Expand a call to __builtin_expect. We just return our argument
4655 as the builtin_expect semantic should've been already executed by
4656 tree branch prediction pass. */
4659 expand_builtin_expect (tree exp
, rtx target
)
4663 if (call_expr_nargs (exp
) < 2)
4665 arg
= CALL_EXPR_ARG (exp
, 0);
4667 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4668 /* When guessing was done, the hints should be already stripped away. */
4669 gcc_assert (!flag_guess_branch_prob
4670 || optimize
== 0 || seen_error ());
4674 /* Expand a call to __builtin_assume_aligned. We just return our first
4675 argument as the builtin_assume_aligned semantic should've been already
4679 expand_builtin_assume_aligned (tree exp
, rtx target
)
4681 if (call_expr_nargs (exp
) < 2)
4683 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4685 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4686 && (call_expr_nargs (exp
) < 3
4687 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4692 expand_builtin_trap (void)
4694 if (targetm
.have_trap ())
4696 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
4697 /* For trap insns when not accumulating outgoing args force
4698 REG_ARGS_SIZE note to prevent crossjumping of calls with
4699 different args sizes. */
4700 if (!ACCUMULATE_OUTGOING_ARGS
)
4701 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4704 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4719 /* Expand EXP, a call to fabs, fabsf or fabsl.
4720 Return NULL_RTX if a normal call should be emitted rather than expanding
4721 the function inline. If convenient, the result should be placed
4722 in TARGET. SUBTARGET may be used as the target for computing
4726 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4732 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4735 arg
= CALL_EXPR_ARG (exp
, 0);
4736 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4737 mode
= TYPE_MODE (TREE_TYPE (arg
));
4738 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4739 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4742 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4743 Return NULL is a normal call should be emitted rather than expanding the
4744 function inline. If convenient, the result should be placed in TARGET.
4745 SUBTARGET may be used as the target for computing the operand. */
4748 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4753 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4756 arg
= CALL_EXPR_ARG (exp
, 0);
4757 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4759 arg
= CALL_EXPR_ARG (exp
, 1);
4760 op1
= expand_normal (arg
);
4762 return expand_copysign (op0
, op1
, target
);
4765 /* Expand a call to __builtin___clear_cache. */
4768 expand_builtin___clear_cache (tree exp
)
4770 if (!targetm
.code_for_clear_cache
)
4772 #ifdef CLEAR_INSN_CACHE
4773 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4774 does something. Just do the default expansion to a call to
4778 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4779 does nothing. There is no need to call it. Do nothing. */
4781 #endif /* CLEAR_INSN_CACHE */
4784 /* We have a "clear_cache" insn, and it will handle everything. */
4786 rtx begin_rtx
, end_rtx
;
4788 /* We must not expand to a library call. If we did, any
4789 fallback library function in libgcc that might contain a call to
4790 __builtin___clear_cache() would recurse infinitely. */
4791 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4793 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4797 if (targetm
.have_clear_cache ())
4799 struct expand_operand ops
[2];
4801 begin
= CALL_EXPR_ARG (exp
, 0);
4802 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4804 end
= CALL_EXPR_ARG (exp
, 1);
4805 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4807 create_address_operand (&ops
[0], begin_rtx
);
4808 create_address_operand (&ops
[1], end_rtx
);
4809 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
4815 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4818 round_trampoline_addr (rtx tramp
)
4820 rtx temp
, addend
, mask
;
4822 /* If we don't need too much alignment, we'll have been guaranteed
4823 proper alignment by get_trampoline_type. */
4824 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4827 /* Round address up to desired boundary. */
4828 temp
= gen_reg_rtx (Pmode
);
4829 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4830 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4832 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4833 temp
, 0, OPTAB_LIB_WIDEN
);
4834 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4835 temp
, 0, OPTAB_LIB_WIDEN
);
4841 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4843 tree t_tramp
, t_func
, t_chain
;
4844 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4846 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4847 POINTER_TYPE
, VOID_TYPE
))
4850 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4851 t_func
= CALL_EXPR_ARG (exp
, 1);
4852 t_chain
= CALL_EXPR_ARG (exp
, 2);
4854 r_tramp
= expand_normal (t_tramp
);
4855 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4856 MEM_NOTRAP_P (m_tramp
) = 1;
4858 /* If ONSTACK, the TRAMP argument should be the address of a field
4859 within the local function's FRAME decl. Either way, let's see if
4860 we can fill in the MEM_ATTRs for this memory. */
4861 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4862 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4864 /* Creator of a heap trampoline is responsible for making sure the
4865 address is aligned to at least STACK_BOUNDARY. Normally malloc
4866 will ensure this anyhow. */
4867 tmp
= round_trampoline_addr (r_tramp
);
4870 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4871 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4872 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4875 /* The FUNC argument should be the address of the nested function.
4876 Extract the actual function decl to pass to the hook. */
4877 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4878 t_func
= TREE_OPERAND (t_func
, 0);
4879 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4881 r_chain
= expand_normal (t_chain
);
4883 /* Generate insns to initialize the trampoline. */
4884 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4888 trampolines_created
= 1;
4890 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4891 "trampoline generated for nested function %qD", t_func
);
4898 expand_builtin_adjust_trampoline (tree exp
)
4902 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4905 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4906 tramp
= round_trampoline_addr (tramp
);
4907 if (targetm
.calls
.trampoline_adjust_address
)
4908 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4913 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4914 function. The function first checks whether the back end provides
4915 an insn to implement signbit for the respective mode. If not, it
4916 checks whether the floating point format of the value is such that
4917 the sign bit can be extracted. If that is not the case, error out.
4918 EXP is the expression that is a call to the builtin function; if
4919 convenient, the result should be placed in TARGET. */
4921 expand_builtin_signbit (tree exp
, rtx target
)
4923 const struct real_format
*fmt
;
4924 machine_mode fmode
, imode
, rmode
;
4927 enum insn_code icode
;
4929 location_t loc
= EXPR_LOCATION (exp
);
4931 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4934 arg
= CALL_EXPR_ARG (exp
, 0);
4935 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4936 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4937 fmt
= REAL_MODE_FORMAT (fmode
);
4939 arg
= builtin_save_expr (arg
);
4941 /* Expand the argument yielding a RTX expression. */
4942 temp
= expand_normal (arg
);
4944 /* Check if the back end provides an insn that handles signbit for the
4946 icode
= optab_handler (signbit_optab
, fmode
);
4947 if (icode
!= CODE_FOR_nothing
)
4949 rtx_insn
*last
= get_last_insn ();
4950 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4951 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4953 delete_insns_since (last
);
4956 /* For floating point formats without a sign bit, implement signbit
4958 bitpos
= fmt
->signbit_ro
;
4961 /* But we can't do this if the format supports signed zero. */
4962 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
4964 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4965 build_real (TREE_TYPE (arg
), dconst0
));
4966 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4969 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4971 imode
= int_mode_for_mode (fmode
);
4972 gcc_assert (imode
!= BLKmode
);
4973 temp
= gen_lowpart (imode
, temp
);
4978 /* Handle targets with different FP word orders. */
4979 if (FLOAT_WORDS_BIG_ENDIAN
)
4980 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4982 word
= bitpos
/ BITS_PER_WORD
;
4983 temp
= operand_subword_force (temp
, word
, fmode
);
4984 bitpos
= bitpos
% BITS_PER_WORD
;
4987 /* Force the intermediate word_mode (or narrower) result into a
4988 register. This avoids attempting to create paradoxical SUBREGs
4989 of floating point modes below. */
4990 temp
= force_reg (imode
, temp
);
4992 /* If the bitpos is within the "result mode" lowpart, the operation
4993 can be implement with a single bitwise AND. Otherwise, we need
4994 a right shift and an AND. */
4996 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4998 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5000 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5001 temp
= gen_lowpart (rmode
, temp
);
5002 temp
= expand_binop (rmode
, and_optab
, temp
,
5003 immed_wide_int_const (mask
, rmode
),
5004 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5008 /* Perform a logical right shift to place the signbit in the least
5009 significant bit, then truncate the result to the desired mode
5010 and mask just this bit. */
5011 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5012 temp
= gen_lowpart (rmode
, temp
);
5013 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5014 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5020 /* Expand fork or exec calls. TARGET is the desired target of the
5021 call. EXP is the call. FN is the
5022 identificator of the actual function. IGNORE is nonzero if the
5023 value is to be ignored. */
5026 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5031 /* If we are not profiling, just call the function. */
5032 if (!profile_arc_flag
)
5035 /* Otherwise call the wrapper. This should be equivalent for the rest of
5036 compiler, so the code does not diverge, and the wrapper may run the
5037 code necessary for keeping the profiling sane. */
5039 switch (DECL_FUNCTION_CODE (fn
))
5042 id
= get_identifier ("__gcov_fork");
5045 case BUILT_IN_EXECL
:
5046 id
= get_identifier ("__gcov_execl");
5049 case BUILT_IN_EXECV
:
5050 id
= get_identifier ("__gcov_execv");
5053 case BUILT_IN_EXECLP
:
5054 id
= get_identifier ("__gcov_execlp");
5057 case BUILT_IN_EXECLE
:
5058 id
= get_identifier ("__gcov_execle");
5061 case BUILT_IN_EXECVP
:
5062 id
= get_identifier ("__gcov_execvp");
5065 case BUILT_IN_EXECVE
:
5066 id
= get_identifier ("__gcov_execve");
5073 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5074 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5075 DECL_EXTERNAL (decl
) = 1;
5076 TREE_PUBLIC (decl
) = 1;
5077 DECL_ARTIFICIAL (decl
) = 1;
5078 TREE_NOTHROW (decl
) = 1;
5079 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5080 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5081 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5082 return expand_call (call
, target
, ignore
);
5087 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5088 the pointer in these functions is void*, the tree optimizers may remove
5089 casts. The mode computed in expand_builtin isn't reliable either, due
5090 to __sync_bool_compare_and_swap.
5092 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5093 group of builtins. This gives us log2 of the mode size. */
5095 static inline machine_mode
5096 get_builtin_sync_mode (int fcode_diff
)
5098 /* The size is not negotiable, so ask not to get BLKmode in return
5099 if the target indicates that a smaller size would be better. */
5100 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5103 /* Expand the memory expression LOC and return the appropriate memory operand
5104 for the builtin_sync operations. */
5107 get_builtin_sync_mem (tree loc
, machine_mode mode
)
5111 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5112 addr
= convert_memory_address (Pmode
, addr
);
5114 /* Note that we explicitly do not want any alias information for this
5115 memory, so that we kill all other live memories. Otherwise we don't
5116 satisfy the full barrier semantics of the intrinsic. */
5117 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5119 /* The alignment needs to be at least according to that of the mode. */
5120 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5121 get_pointer_alignment (loc
)));
5122 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5123 MEM_VOLATILE_P (mem
) = 1;
5128 /* Make sure an argument is in the right mode.
5129 EXP is the tree argument.
5130 MODE is the mode it should be in. */
5133 expand_expr_force_mode (tree exp
, machine_mode mode
)
5136 machine_mode old_mode
;
5138 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5139 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5140 of CONST_INTs, where we know the old_mode only from the call argument. */
5142 old_mode
= GET_MODE (val
);
5143 if (old_mode
== VOIDmode
)
5144 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5145 val
= convert_modes (mode
, old_mode
, val
, 1);
5150 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5151 EXP is the CALL_EXPR. CODE is the rtx code
5152 that corresponds to the arithmetic or logical operation from the name;
5153 an exception here is that NOT actually means NAND. TARGET is an optional
5154 place for us to store the results; AFTER is true if this is the
5155 fetch_and_xxx form. */
5158 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
5159 enum rtx_code code
, bool after
,
5163 location_t loc
= EXPR_LOCATION (exp
);
5165 if (code
== NOT
&& warn_sync_nand
)
5167 tree fndecl
= get_callee_fndecl (exp
);
5168 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5170 static bool warned_f_a_n
, warned_n_a_f
;
5174 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5175 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5176 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5177 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5178 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5182 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5183 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5184 warned_f_a_n
= true;
5187 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5188 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5189 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5190 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5191 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5195 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5196 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5197 warned_n_a_f
= true;
5205 /* Expand the operands. */
5206 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5207 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5209 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
5213 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5214 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5215 true if this is the boolean form. TARGET is a place for us to store the
5216 results; this is NOT optional if IS_BOOL is true. */
5219 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
5220 bool is_bool
, rtx target
)
5222 rtx old_val
, new_val
, mem
;
5225 /* Expand the operands. */
5226 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5227 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5228 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5230 pbool
= poval
= NULL
;
5231 if (target
!= const0_rtx
)
5238 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5239 false, MEMMODEL_SYNC_SEQ_CST
,
5240 MEMMODEL_SYNC_SEQ_CST
))
5246 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5247 general form is actually an atomic exchange, and some targets only
5248 support a reduced form with the second argument being a constant 1.
5249 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5253 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
5258 /* Expand the operands. */
5259 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5260 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5262 return expand_sync_lock_test_and_set (target
, mem
, val
);
5265 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5268 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
5272 /* Expand the operands. */
5273 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5275 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
5278 /* Given an integer representing an ``enum memmodel'', verify its
5279 correctness and return the memory model enum. */
5281 static enum memmodel
5282 get_memmodel (tree exp
)
5285 unsigned HOST_WIDE_INT val
;
5287 /* If the parameter is not a constant, it's a run time value so we'll just
5288 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5289 if (TREE_CODE (exp
) != INTEGER_CST
)
5290 return MEMMODEL_SEQ_CST
;
5292 op
= expand_normal (exp
);
5295 if (targetm
.memmodel_check
)
5296 val
= targetm
.memmodel_check (val
);
5297 else if (val
& ~MEMMODEL_MASK
)
5299 warning (OPT_Winvalid_memory_model
,
5300 "Unknown architecture specifier in memory model to builtin.");
5301 return MEMMODEL_SEQ_CST
;
5304 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5305 if (memmodel_base (val
) >= MEMMODEL_LAST
)
5307 warning (OPT_Winvalid_memory_model
,
5308 "invalid memory model argument to builtin");
5309 return MEMMODEL_SEQ_CST
;
5312 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5313 be conservative and promote consume to acquire. */
5314 if (val
== MEMMODEL_CONSUME
)
5315 val
= MEMMODEL_ACQUIRE
;
5317 return (enum memmodel
) val
;
5320 /* Expand the __atomic_exchange intrinsic:
5321 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5322 EXP is the CALL_EXPR.
5323 TARGET is an optional place for us to store the results. */
5326 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
5329 enum memmodel model
;
5331 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5333 if (!flag_inline_atomics
)
5336 /* Expand the operands. */
5337 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5338 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5340 return expand_atomic_exchange (target
, mem
, val
, model
);
5343 /* Expand the __atomic_compare_exchange intrinsic:
5344 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5345 TYPE desired, BOOL weak,
5346 enum memmodel success,
5347 enum memmodel failure)
5348 EXP is the CALL_EXPR.
5349 TARGET is an optional place for us to store the results. */
5352 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5355 rtx expect
, desired
, mem
, oldval
;
5356 rtx_code_label
*label
;
5357 enum memmodel success
, failure
;
5361 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5362 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5364 if (failure
> success
)
5366 warning (OPT_Winvalid_memory_model
,
5367 "failure memory model cannot be stronger than success memory "
5368 "model for %<__atomic_compare_exchange%>");
5369 success
= MEMMODEL_SEQ_CST
;
5372 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5374 warning (OPT_Winvalid_memory_model
,
5375 "invalid failure memory model for "
5376 "%<__atomic_compare_exchange%>");
5377 failure
= MEMMODEL_SEQ_CST
;
5378 success
= MEMMODEL_SEQ_CST
;
5382 if (!flag_inline_atomics
)
5385 /* Expand the operands. */
5386 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5388 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5389 expect
= convert_memory_address (Pmode
, expect
);
5390 expect
= gen_rtx_MEM (mode
, expect
);
5391 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5393 weak
= CALL_EXPR_ARG (exp
, 3);
5395 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5398 if (target
== const0_rtx
)
5401 /* Lest the rtl backend create a race condition with an imporoper store
5402 to memory, always create a new pseudo for OLDVAL. */
5405 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5406 is_weak
, success
, failure
))
5409 /* Conditionally store back to EXPECT, lest we create a race condition
5410 with an improper store to memory. */
5411 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5412 the normal case where EXPECT is totally private, i.e. a register. At
5413 which point the store can be unconditional. */
5414 label
= gen_label_rtx ();
5415 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
5416 GET_MODE (target
), 1, label
);
5417 emit_move_insn (expect
, oldval
);
5423 /* Expand the __atomic_load intrinsic:
5424 TYPE __atomic_load (TYPE *object, enum memmodel)
5425 EXP is the CALL_EXPR.
5426 TARGET is an optional place for us to store the results. */
5429 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
5432 enum memmodel model
;
5434 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5435 if (is_mm_release (model
) || is_mm_acq_rel (model
))
5437 warning (OPT_Winvalid_memory_model
,
5438 "invalid memory model for %<__atomic_load%>");
5439 model
= MEMMODEL_SEQ_CST
;
5442 if (!flag_inline_atomics
)
5445 /* Expand the operand. */
5446 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5448 return expand_atomic_load (target
, mem
, model
);
5452 /* Expand the __atomic_store intrinsic:
5453 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5454 EXP is the CALL_EXPR.
5455 TARGET is an optional place for us to store the results. */
5458 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
5461 enum memmodel model
;
5463 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5464 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
5465 || is_mm_release (model
)))
5467 warning (OPT_Winvalid_memory_model
,
5468 "invalid memory model for %<__atomic_store%>");
5469 model
= MEMMODEL_SEQ_CST
;
5472 if (!flag_inline_atomics
)
5475 /* Expand the operands. */
5476 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5477 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5479 return expand_atomic_store (mem
, val
, model
, false);
5482 /* Expand the __atomic_fetch_XXX intrinsic:
5483 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5484 EXP is the CALL_EXPR.
5485 TARGET is an optional place for us to store the results.
5486 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5487 FETCH_AFTER is true if returning the result of the operation.
5488 FETCH_AFTER is false if returning the value before the operation.
5489 IGNORE is true if the result is not used.
5490 EXT_CALL is the correct builtin for an external call if this cannot be
5491 resolved to an instruction sequence. */
5494 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
5495 enum rtx_code code
, bool fetch_after
,
5496 bool ignore
, enum built_in_function ext_call
)
5499 enum memmodel model
;
5503 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5505 /* Expand the operands. */
5506 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5507 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5509 /* Only try generating instructions if inlining is turned on. */
5510 if (flag_inline_atomics
)
5512 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5517 /* Return if a different routine isn't needed for the library call. */
5518 if (ext_call
== BUILT_IN_NONE
)
5521 /* Change the call to the specified function. */
5522 fndecl
= get_callee_fndecl (exp
);
5523 addr
= CALL_EXPR_FN (exp
);
5526 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5527 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5529 /* Expand the call here so we can emit trailing code. */
5530 ret
= expand_call (exp
, target
, ignore
);
5532 /* Replace the original function just in case it matters. */
5533 TREE_OPERAND (addr
, 0) = fndecl
;
5535 /* Then issue the arithmetic correction to return the right result. */
5540 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5542 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5545 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5551 /* Expand an atomic clear operation.
5552 void _atomic_clear (BOOL *obj, enum memmodel)
5553 EXP is the call expression. */
5556 expand_builtin_atomic_clear (tree exp
)
5560 enum memmodel model
;
5562 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5563 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5564 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5566 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
5568 warning (OPT_Winvalid_memory_model
,
5569 "invalid memory model for %<__atomic_store%>");
5570 model
= MEMMODEL_SEQ_CST
;
5573 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5574 Failing that, a store is issued by __atomic_store. The only way this can
5575 fail is if the bool type is larger than a word size. Unlikely, but
5576 handle it anyway for completeness. Assume a single threaded model since
5577 there is no atomic support in this case, and no barriers are required. */
5578 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5580 emit_move_insn (mem
, const0_rtx
);
5584 /* Expand an atomic test_and_set operation.
5585 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5586 EXP is the call expression. */
5589 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5592 enum memmodel model
;
5595 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5596 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5597 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5599 return expand_atomic_test_and_set (target
, mem
, model
);
5603 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5604 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5607 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5611 unsigned int mode_align
, type_align
;
5613 if (TREE_CODE (arg0
) != INTEGER_CST
)
5616 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5617 mode
= mode_for_size (size
, MODE_INT
, 0);
5618 mode_align
= GET_MODE_ALIGNMENT (mode
);
5620 if (TREE_CODE (arg1
) == INTEGER_CST
)
5622 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
5624 /* Either this argument is null, or it's a fake pointer encoding
5625 the alignment of the object. */
5627 val
*= BITS_PER_UNIT
;
5629 if (val
== 0 || mode_align
< val
)
5630 type_align
= mode_align
;
5636 tree ttype
= TREE_TYPE (arg1
);
5638 /* This function is usually invoked and folded immediately by the front
5639 end before anything else has a chance to look at it. The pointer
5640 parameter at this point is usually cast to a void *, so check for that
5641 and look past the cast. */
5642 if (CONVERT_EXPR_P (arg1
) && POINTER_TYPE_P (ttype
)
5643 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5644 arg1
= TREE_OPERAND (arg1
, 0);
5646 ttype
= TREE_TYPE (arg1
);
5647 gcc_assert (POINTER_TYPE_P (ttype
));
5649 /* Get the underlying type of the object. */
5650 ttype
= TREE_TYPE (ttype
);
5651 type_align
= TYPE_ALIGN (ttype
);
5654 /* If the object has smaller alignment, the lock free routines cannot
5656 if (type_align
< mode_align
)
5657 return boolean_false_node
;
5659 /* Check if a compare_and_swap pattern exists for the mode which represents
5660 the required size. The pattern is not allowed to fail, so the existence
5661 of the pattern indicates support is present. */
5662 if (can_compare_and_swap_p (mode
, true))
5663 return boolean_true_node
;
5665 return boolean_false_node
;
5668 /* Return true if the parameters to call EXP represent an object which will
5669 always generate lock free instructions. The first argument represents the
5670 size of the object, and the second parameter is a pointer to the object
5671 itself. If NULL is passed for the object, then the result is based on
5672 typical alignment for an object of the specified size. Otherwise return
5676 expand_builtin_atomic_always_lock_free (tree exp
)
5679 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5680 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5682 if (TREE_CODE (arg0
) != INTEGER_CST
)
5684 error ("non-constant argument 1 to __atomic_always_lock_free");
5688 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5689 if (size
== boolean_true_node
)
5694 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5695 is lock free on this architecture. */
5698 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5700 if (!flag_inline_atomics
)
5703 /* If it isn't always lock free, don't generate a result. */
5704 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5705 return boolean_true_node
;
5710 /* Return true if the parameters to call EXP represent an object which will
5711 always generate lock free instructions. The first argument represents the
5712 size of the object, and the second parameter is a pointer to the object
5713 itself. If NULL is passed for the object, then the result is based on
5714 typical alignment for an object of the specified size. Otherwise return
5718 expand_builtin_atomic_is_lock_free (tree exp
)
5721 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5722 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5724 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5726 error ("non-integer argument 1 to __atomic_is_lock_free");
5730 if (!flag_inline_atomics
)
5733 /* If the value is known at compile time, return the RTX for it. */
5734 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5735 if (size
== boolean_true_node
)
5741 /* Expand the __atomic_thread_fence intrinsic:
5742 void __atomic_thread_fence (enum memmodel)
5743 EXP is the CALL_EXPR. */
5746 expand_builtin_atomic_thread_fence (tree exp
)
5748 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5749 expand_mem_thread_fence (model
);
5752 /* Expand the __atomic_signal_fence intrinsic:
5753 void __atomic_signal_fence (enum memmodel)
5754 EXP is the CALL_EXPR. */
5757 expand_builtin_atomic_signal_fence (tree exp
)
5759 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5760 expand_mem_signal_fence (model
);
5763 /* Expand the __sync_synchronize intrinsic. */
5766 expand_builtin_sync_synchronize (void)
5768 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
5772 expand_builtin_thread_pointer (tree exp
, rtx target
)
5774 enum insn_code icode
;
5775 if (!validate_arglist (exp
, VOID_TYPE
))
5777 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5778 if (icode
!= CODE_FOR_nothing
)
5780 struct expand_operand op
;
5781 /* If the target is not sutitable then create a new target. */
5782 if (target
== NULL_RTX
5784 || GET_MODE (target
) != Pmode
)
5785 target
= gen_reg_rtx (Pmode
);
5786 create_output_operand (&op
, target
, Pmode
);
5787 expand_insn (icode
, 1, &op
);
5790 error ("__builtin_thread_pointer is not supported on this target");
5795 expand_builtin_set_thread_pointer (tree exp
)
5797 enum insn_code icode
;
5798 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5800 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5801 if (icode
!= CODE_FOR_nothing
)
5803 struct expand_operand op
;
5804 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5805 Pmode
, EXPAND_NORMAL
);
5806 create_input_operand (&op
, val
, Pmode
);
5807 expand_insn (icode
, 1, &op
);
5810 error ("__builtin_set_thread_pointer is not supported on this target");
5814 /* Emit code to restore the current value of stack. */
5817 expand_stack_restore (tree var
)
5820 rtx sa
= expand_normal (var
);
5822 sa
= convert_memory_address (Pmode
, sa
);
5824 prev
= get_last_insn ();
5825 emit_stack_restore (SAVE_BLOCK
, sa
);
5827 record_new_stack_level ();
5829 fixup_args_size_notes (prev
, get_last_insn (), 0);
5832 /* Emit code to save the current value of stack. */
5835 expand_stack_save (void)
5839 emit_stack_save (SAVE_BLOCK
, &ret
);
5844 /* Expand an expression EXP that calls a built-in function,
5845 with result going to TARGET if that's convenient
5846 (and in mode MODE if that's convenient).
5847 SUBTARGET may be used as the target for computing one of EXP's operands.
5848 IGNORE is nonzero if the value is to be ignored. */
5851 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
5854 tree fndecl
= get_callee_fndecl (exp
);
5855 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5856 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5859 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5860 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5862 /* When ASan is enabled, we don't want to expand some memory/string
5863 builtins and rely on libsanitizer's hooks. This allows us to avoid
5864 redundant checks and be sure, that possible overflow will be detected
5867 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
5868 return expand_call (exp
, target
, ignore
);
5870 /* When not optimizing, generate calls to library functions for a certain
5873 && !called_as_built_in (fndecl
)
5874 && fcode
!= BUILT_IN_FORK
5875 && fcode
!= BUILT_IN_EXECL
5876 && fcode
!= BUILT_IN_EXECV
5877 && fcode
!= BUILT_IN_EXECLP
5878 && fcode
!= BUILT_IN_EXECLE
5879 && fcode
!= BUILT_IN_EXECVP
5880 && fcode
!= BUILT_IN_EXECVE
5881 && fcode
!= BUILT_IN_ALLOCA
5882 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5883 && fcode
!= BUILT_IN_FREE
5884 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
5885 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
5886 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
5887 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
5888 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5889 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
5890 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5891 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5892 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5893 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
5894 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
5895 && fcode
!= BUILT_IN_CHKP_BNDRET
)
5896 return expand_call (exp
, target
, ignore
);
5898 /* The built-in function expanders test for target == const0_rtx
5899 to determine whether the function's result will be ignored. */
5901 target
= const0_rtx
;
5903 /* If the result of a pure or const built-in function is ignored, and
5904 none of its arguments are volatile, we can avoid expanding the
5905 built-in call and just evaluate the arguments for side-effects. */
5906 if (target
== const0_rtx
5907 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5908 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5910 bool volatilep
= false;
5912 call_expr_arg_iterator iter
;
5914 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5915 if (TREE_THIS_VOLATILE (arg
))
5923 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5924 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5929 /* expand_builtin_with_bounds is supposed to be used for
5930 instrumented builtin calls. */
5931 gcc_assert (!CALL_WITH_BOUNDS_P (exp
));
5935 CASE_FLT_FN (BUILT_IN_FABS
):
5936 case BUILT_IN_FABSD32
:
5937 case BUILT_IN_FABSD64
:
5938 case BUILT_IN_FABSD128
:
5939 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5944 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5945 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5950 /* Just do a normal library call if we were unable to fold
5952 CASE_FLT_FN (BUILT_IN_CABS
):
5955 CASE_FLT_FN (BUILT_IN_EXP
):
5956 CASE_FLT_FN (BUILT_IN_EXP10
):
5957 CASE_FLT_FN (BUILT_IN_POW10
):
5958 CASE_FLT_FN (BUILT_IN_EXP2
):
5959 CASE_FLT_FN (BUILT_IN_EXPM1
):
5960 CASE_FLT_FN (BUILT_IN_LOGB
):
5961 CASE_FLT_FN (BUILT_IN_LOG
):
5962 CASE_FLT_FN (BUILT_IN_LOG10
):
5963 CASE_FLT_FN (BUILT_IN_LOG2
):
5964 CASE_FLT_FN (BUILT_IN_LOG1P
):
5965 CASE_FLT_FN (BUILT_IN_TAN
):
5966 CASE_FLT_FN (BUILT_IN_ASIN
):
5967 CASE_FLT_FN (BUILT_IN_ACOS
):
5968 CASE_FLT_FN (BUILT_IN_ATAN
):
5969 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5970 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5971 because of possible accuracy problems. */
5972 if (! flag_unsafe_math_optimizations
)
5974 CASE_FLT_FN (BUILT_IN_SQRT
):
5975 CASE_FLT_FN (BUILT_IN_FLOOR
):
5976 CASE_FLT_FN (BUILT_IN_CEIL
):
5977 CASE_FLT_FN (BUILT_IN_TRUNC
):
5978 CASE_FLT_FN (BUILT_IN_ROUND
):
5979 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5980 CASE_FLT_FN (BUILT_IN_RINT
):
5981 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5986 CASE_FLT_FN (BUILT_IN_FMA
):
5987 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5992 CASE_FLT_FN (BUILT_IN_ILOGB
):
5993 if (! flag_unsafe_math_optimizations
)
5995 CASE_FLT_FN (BUILT_IN_ISINF
):
5996 CASE_FLT_FN (BUILT_IN_FINITE
):
5997 case BUILT_IN_ISFINITE
:
5998 case BUILT_IN_ISNORMAL
:
5999 target
= expand_builtin_interclass_mathfn (exp
, target
);
6004 CASE_FLT_FN (BUILT_IN_ICEIL
):
6005 CASE_FLT_FN (BUILT_IN_LCEIL
):
6006 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6007 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6008 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6009 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6010 target
= expand_builtin_int_roundingfn (exp
, target
);
6015 CASE_FLT_FN (BUILT_IN_IRINT
):
6016 CASE_FLT_FN (BUILT_IN_LRINT
):
6017 CASE_FLT_FN (BUILT_IN_LLRINT
):
6018 CASE_FLT_FN (BUILT_IN_IROUND
):
6019 CASE_FLT_FN (BUILT_IN_LROUND
):
6020 CASE_FLT_FN (BUILT_IN_LLROUND
):
6021 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6026 CASE_FLT_FN (BUILT_IN_POWI
):
6027 target
= expand_builtin_powi (exp
, target
);
6032 CASE_FLT_FN (BUILT_IN_ATAN2
):
6033 CASE_FLT_FN (BUILT_IN_LDEXP
):
6034 CASE_FLT_FN (BUILT_IN_SCALB
):
6035 CASE_FLT_FN (BUILT_IN_SCALBN
):
6036 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6037 if (! flag_unsafe_math_optimizations
)
6040 CASE_FLT_FN (BUILT_IN_FMOD
):
6041 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6042 CASE_FLT_FN (BUILT_IN_DREM
):
6043 CASE_FLT_FN (BUILT_IN_POW
):
6044 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6049 CASE_FLT_FN (BUILT_IN_CEXPI
):
6050 target
= expand_builtin_cexpi (exp
, target
);
6051 gcc_assert (target
);
6054 CASE_FLT_FN (BUILT_IN_SIN
):
6055 CASE_FLT_FN (BUILT_IN_COS
):
6056 if (! flag_unsafe_math_optimizations
)
6058 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6063 CASE_FLT_FN (BUILT_IN_SINCOS
):
6064 if (! flag_unsafe_math_optimizations
)
6066 target
= expand_builtin_sincos (exp
);
6071 case BUILT_IN_APPLY_ARGS
:
6072 return expand_builtin_apply_args ();
6074 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6075 FUNCTION with a copy of the parameters described by
6076 ARGUMENTS, and ARGSIZE. It returns a block of memory
6077 allocated on the stack into which is stored all the registers
6078 that might possibly be used for returning the result of a
6079 function. ARGUMENTS is the value returned by
6080 __builtin_apply_args. ARGSIZE is the number of bytes of
6081 arguments that must be copied. ??? How should this value be
6082 computed? We'll also need a safe worst case value for varargs
6084 case BUILT_IN_APPLY
:
6085 if (!validate_arglist (exp
, POINTER_TYPE
,
6086 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6087 && !validate_arglist (exp
, REFERENCE_TYPE
,
6088 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6094 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6095 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6096 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6098 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6101 /* __builtin_return (RESULT) causes the function to return the
6102 value described by RESULT. RESULT is address of the block of
6103 memory returned by __builtin_apply. */
6104 case BUILT_IN_RETURN
:
6105 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6106 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6109 case BUILT_IN_SAVEREGS
:
6110 return expand_builtin_saveregs ();
6112 case BUILT_IN_VA_ARG_PACK
:
6113 /* All valid uses of __builtin_va_arg_pack () are removed during
6115 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6118 case BUILT_IN_VA_ARG_PACK_LEN
:
6119 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6121 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6124 /* Return the address of the first anonymous stack arg. */
6125 case BUILT_IN_NEXT_ARG
:
6126 if (fold_builtin_next_arg (exp
, false))
6128 return expand_builtin_next_arg ();
6130 case BUILT_IN_CLEAR_CACHE
:
6131 target
= expand_builtin___clear_cache (exp
);
6136 case BUILT_IN_CLASSIFY_TYPE
:
6137 return expand_builtin_classify_type (exp
);
6139 case BUILT_IN_CONSTANT_P
:
6142 case BUILT_IN_FRAME_ADDRESS
:
6143 case BUILT_IN_RETURN_ADDRESS
:
6144 return expand_builtin_frame_address (fndecl
, exp
);
6146 /* Returns the address of the area where the structure is returned.
6148 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6149 if (call_expr_nargs (exp
) != 0
6150 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6151 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6154 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6156 case BUILT_IN_ALLOCA
:
6157 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6158 /* If the allocation stems from the declaration of a variable-sized
6159 object, it cannot accumulate. */
6160 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6165 case BUILT_IN_STACK_SAVE
:
6166 return expand_stack_save ();
6168 case BUILT_IN_STACK_RESTORE
:
6169 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6172 case BUILT_IN_BSWAP16
:
6173 case BUILT_IN_BSWAP32
:
6174 case BUILT_IN_BSWAP64
:
6175 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6180 CASE_INT_FN (BUILT_IN_FFS
):
6181 target
= expand_builtin_unop (target_mode
, exp
, target
,
6182 subtarget
, ffs_optab
);
6187 CASE_INT_FN (BUILT_IN_CLZ
):
6188 target
= expand_builtin_unop (target_mode
, exp
, target
,
6189 subtarget
, clz_optab
);
6194 CASE_INT_FN (BUILT_IN_CTZ
):
6195 target
= expand_builtin_unop (target_mode
, exp
, target
,
6196 subtarget
, ctz_optab
);
6201 CASE_INT_FN (BUILT_IN_CLRSB
):
6202 target
= expand_builtin_unop (target_mode
, exp
, target
,
6203 subtarget
, clrsb_optab
);
6208 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6209 target
= expand_builtin_unop (target_mode
, exp
, target
,
6210 subtarget
, popcount_optab
);
6215 CASE_INT_FN (BUILT_IN_PARITY
):
6216 target
= expand_builtin_unop (target_mode
, exp
, target
,
6217 subtarget
, parity_optab
);
6222 case BUILT_IN_STRLEN
:
6223 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6228 case BUILT_IN_STRCPY
:
6229 target
= expand_builtin_strcpy (exp
, target
);
6234 case BUILT_IN_STRNCPY
:
6235 target
= expand_builtin_strncpy (exp
, target
);
6240 case BUILT_IN_STPCPY
:
6241 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6246 case BUILT_IN_MEMCPY
:
6247 target
= expand_builtin_memcpy (exp
, target
);
6252 case BUILT_IN_MEMPCPY
:
6253 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6258 case BUILT_IN_MEMSET
:
6259 target
= expand_builtin_memset (exp
, target
, mode
);
6264 case BUILT_IN_BZERO
:
6265 target
= expand_builtin_bzero (exp
);
6270 case BUILT_IN_STRCMP
:
6271 target
= expand_builtin_strcmp (exp
, target
);
6276 case BUILT_IN_STRNCMP
:
6277 target
= expand_builtin_strncmp (exp
, target
, mode
);
6283 case BUILT_IN_MEMCMP
:
6284 target
= expand_builtin_memcmp (exp
, target
);
6289 case BUILT_IN_SETJMP
:
6290 /* This should have been lowered to the builtins below. */
6293 case BUILT_IN_SETJMP_SETUP
:
6294 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6295 and the receiver label. */
6296 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6298 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6299 VOIDmode
, EXPAND_NORMAL
);
6300 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6301 rtx_insn
*label_r
= label_rtx (label
);
6303 /* This is copied from the handling of non-local gotos. */
6304 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6305 nonlocal_goto_handler_labels
6306 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6307 nonlocal_goto_handler_labels
);
6308 /* ??? Do not let expand_label treat us as such since we would
6309 not want to be both on the list of non-local labels and on
6310 the list of forced labels. */
6311 FORCED_LABEL (label
) = 0;
6316 case BUILT_IN_SETJMP_RECEIVER
:
6317 /* __builtin_setjmp_receiver is passed the receiver label. */
6318 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6320 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6321 rtx_insn
*label_r
= label_rtx (label
);
6323 expand_builtin_setjmp_receiver (label_r
);
6328 /* __builtin_longjmp is passed a pointer to an array of five words.
6329 It's similar to the C library longjmp function but works with
6330 __builtin_setjmp above. */
6331 case BUILT_IN_LONGJMP
:
6332 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6334 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6335 VOIDmode
, EXPAND_NORMAL
);
6336 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6338 if (value
!= const1_rtx
)
6340 error ("%<__builtin_longjmp%> second argument must be 1");
6344 expand_builtin_longjmp (buf_addr
, value
);
6349 case BUILT_IN_NONLOCAL_GOTO
:
6350 target
= expand_builtin_nonlocal_goto (exp
);
6355 /* This updates the setjmp buffer that is its argument with the value
6356 of the current stack pointer. */
6357 case BUILT_IN_UPDATE_SETJMP_BUF
:
6358 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6361 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6363 expand_builtin_update_setjmp_buf (buf_addr
);
6369 expand_builtin_trap ();
6372 case BUILT_IN_UNREACHABLE
:
6373 expand_builtin_unreachable ();
6376 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6377 case BUILT_IN_SIGNBITD32
:
6378 case BUILT_IN_SIGNBITD64
:
6379 case BUILT_IN_SIGNBITD128
:
6380 target
= expand_builtin_signbit (exp
, target
);
6385 /* Various hooks for the DWARF 2 __throw routine. */
6386 case BUILT_IN_UNWIND_INIT
:
6387 expand_builtin_unwind_init ();
6389 case BUILT_IN_DWARF_CFA
:
6390 return virtual_cfa_rtx
;
6391 #ifdef DWARF2_UNWIND_INFO
6392 case BUILT_IN_DWARF_SP_COLUMN
:
6393 return expand_builtin_dwarf_sp_column ();
6394 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6395 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6398 case BUILT_IN_FROB_RETURN_ADDR
:
6399 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6400 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6401 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6402 case BUILT_IN_EH_RETURN
:
6403 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6404 CALL_EXPR_ARG (exp
, 1));
6406 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6407 return expand_builtin_eh_return_data_regno (exp
);
6408 case BUILT_IN_EXTEND_POINTER
:
6409 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6410 case BUILT_IN_EH_POINTER
:
6411 return expand_builtin_eh_pointer (exp
);
6412 case BUILT_IN_EH_FILTER
:
6413 return expand_builtin_eh_filter (exp
);
6414 case BUILT_IN_EH_COPY_VALUES
:
6415 return expand_builtin_eh_copy_values (exp
);
6417 case BUILT_IN_VA_START
:
6418 return expand_builtin_va_start (exp
);
6419 case BUILT_IN_VA_END
:
6420 return expand_builtin_va_end (exp
);
6421 case BUILT_IN_VA_COPY
:
6422 return expand_builtin_va_copy (exp
);
6423 case BUILT_IN_EXPECT
:
6424 return expand_builtin_expect (exp
, target
);
6425 case BUILT_IN_ASSUME_ALIGNED
:
6426 return expand_builtin_assume_aligned (exp
, target
);
6427 case BUILT_IN_PREFETCH
:
6428 expand_builtin_prefetch (exp
);
6431 case BUILT_IN_INIT_TRAMPOLINE
:
6432 return expand_builtin_init_trampoline (exp
, true);
6433 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6434 return expand_builtin_init_trampoline (exp
, false);
6435 case BUILT_IN_ADJUST_TRAMPOLINE
:
6436 return expand_builtin_adjust_trampoline (exp
);
6439 case BUILT_IN_EXECL
:
6440 case BUILT_IN_EXECV
:
6441 case BUILT_IN_EXECLP
:
6442 case BUILT_IN_EXECLE
:
6443 case BUILT_IN_EXECVP
:
6444 case BUILT_IN_EXECVE
:
6445 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6450 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6451 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6452 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6453 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6454 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6455 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6456 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6461 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6462 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6463 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6464 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6465 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6466 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6467 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6472 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6473 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6474 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6475 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6476 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6477 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6478 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6483 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6484 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6485 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6486 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6487 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6488 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6489 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6494 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6495 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6496 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6497 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6498 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6499 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6500 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6509 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6510 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6511 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6516 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6517 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6518 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6519 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6520 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6521 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6522 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6527 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6528 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6529 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6530 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6531 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6532 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6533 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6538 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6539 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6540 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6541 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6542 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6543 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6544 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6549 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6550 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6551 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6552 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6553 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6554 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6555 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6560 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6561 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6562 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6563 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6564 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6565 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6566 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6572 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6573 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6575 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6576 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6577 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6582 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6583 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6584 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6585 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6586 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6587 if (mode
== VOIDmode
)
6588 mode
= TYPE_MODE (boolean_type_node
);
6589 if (!target
|| !register_operand (target
, mode
))
6590 target
= gen_reg_rtx (mode
);
6592 mode
= get_builtin_sync_mode
6593 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6594 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6599 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6600 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6601 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6602 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6603 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6604 mode
= get_builtin_sync_mode
6605 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6606 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6611 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6615 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6616 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6617 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6622 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6623 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6624 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6625 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6626 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6627 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6628 expand_builtin_sync_lock_release (mode
, exp
);
6631 case BUILT_IN_SYNC_SYNCHRONIZE
:
6632 expand_builtin_sync_synchronize ();
6635 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6636 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6637 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6638 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6639 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6640 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6641 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6649 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6650 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6652 unsigned int nargs
, z
;
6653 vec
<tree
, va_gc
> *vec
;
6656 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6657 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6661 /* If this is turned into an external library call, the weak parameter
6662 must be dropped to match the expected parameter list. */
6663 nargs
= call_expr_nargs (exp
);
6664 vec_alloc (vec
, nargs
- 1);
6665 for (z
= 0; z
< 3; z
++)
6666 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6667 /* Skip the boolean weak parameter. */
6668 for (z
= 4; z
< 6; z
++)
6669 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6670 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6674 case BUILT_IN_ATOMIC_LOAD_1
:
6675 case BUILT_IN_ATOMIC_LOAD_2
:
6676 case BUILT_IN_ATOMIC_LOAD_4
:
6677 case BUILT_IN_ATOMIC_LOAD_8
:
6678 case BUILT_IN_ATOMIC_LOAD_16
:
6679 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6680 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6685 case BUILT_IN_ATOMIC_STORE_1
:
6686 case BUILT_IN_ATOMIC_STORE_2
:
6687 case BUILT_IN_ATOMIC_STORE_4
:
6688 case BUILT_IN_ATOMIC_STORE_8
:
6689 case BUILT_IN_ATOMIC_STORE_16
:
6690 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6691 target
= expand_builtin_atomic_store (mode
, exp
);
6696 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6697 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6698 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6699 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6700 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6702 enum built_in_function lib
;
6703 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6704 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6705 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6706 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6712 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6713 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6714 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6715 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6716 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6718 enum built_in_function lib
;
6719 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6720 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6721 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6722 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6728 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6729 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6730 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6731 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6732 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6734 enum built_in_function lib
;
6735 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6736 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6737 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6738 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6744 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6745 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6746 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6747 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6748 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6750 enum built_in_function lib
;
6751 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6752 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6753 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6754 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6760 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6761 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6762 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6763 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6764 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6766 enum built_in_function lib
;
6767 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6768 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6769 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6770 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6776 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6777 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6778 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6779 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6780 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6782 enum built_in_function lib
;
6783 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6784 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6785 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6786 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6792 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6793 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6794 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6795 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6796 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6797 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6798 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6799 ignore
, BUILT_IN_NONE
);
6804 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6805 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6806 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6807 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6808 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6809 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6810 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6811 ignore
, BUILT_IN_NONE
);
6816 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6817 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6818 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6819 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6820 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6821 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6822 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6823 ignore
, BUILT_IN_NONE
);
6828 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6829 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6830 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6831 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6832 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6833 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6834 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6835 ignore
, BUILT_IN_NONE
);
6840 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6841 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6842 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6843 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6844 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6845 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6846 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6847 ignore
, BUILT_IN_NONE
);
6852 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6853 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6854 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6855 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6856 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6857 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6858 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6859 ignore
, BUILT_IN_NONE
);
6864 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6865 return expand_builtin_atomic_test_and_set (exp
, target
);
6867 case BUILT_IN_ATOMIC_CLEAR
:
6868 return expand_builtin_atomic_clear (exp
);
6870 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6871 return expand_builtin_atomic_always_lock_free (exp
);
6873 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6874 target
= expand_builtin_atomic_is_lock_free (exp
);
6879 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6880 expand_builtin_atomic_thread_fence (exp
);
6883 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6884 expand_builtin_atomic_signal_fence (exp
);
6887 case BUILT_IN_OBJECT_SIZE
:
6888 return expand_builtin_object_size (exp
);
6890 case BUILT_IN_MEMCPY_CHK
:
6891 case BUILT_IN_MEMPCPY_CHK
:
6892 case BUILT_IN_MEMMOVE_CHK
:
6893 case BUILT_IN_MEMSET_CHK
:
6894 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6899 case BUILT_IN_STRCPY_CHK
:
6900 case BUILT_IN_STPCPY_CHK
:
6901 case BUILT_IN_STRNCPY_CHK
:
6902 case BUILT_IN_STPNCPY_CHK
:
6903 case BUILT_IN_STRCAT_CHK
:
6904 case BUILT_IN_STRNCAT_CHK
:
6905 case BUILT_IN_SNPRINTF_CHK
:
6906 case BUILT_IN_VSNPRINTF_CHK
:
6907 maybe_emit_chk_warning (exp
, fcode
);
6910 case BUILT_IN_SPRINTF_CHK
:
6911 case BUILT_IN_VSPRINTF_CHK
:
6912 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6916 if (warn_free_nonheap_object
)
6917 maybe_emit_free_warning (exp
);
6920 case BUILT_IN_THREAD_POINTER
:
6921 return expand_builtin_thread_pointer (exp
, target
);
6923 case BUILT_IN_SET_THREAD_POINTER
:
6924 expand_builtin_set_thread_pointer (exp
);
6927 case BUILT_IN_CILK_DETACH
:
6928 expand_builtin_cilk_detach (exp
);
6931 case BUILT_IN_CILK_POP_FRAME
:
6932 expand_builtin_cilk_pop_frame (exp
);
6935 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6936 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6937 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6938 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6939 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6940 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6941 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6942 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6943 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6944 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6945 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6946 /* We allow user CHKP builtins if Pointer Bounds
6948 if (!chkp_function_instrumented_p (current_function_decl
))
6950 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6951 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6952 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6953 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6954 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6955 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6956 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6957 return expand_normal (size_zero_node
);
6958 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6959 return expand_normal (size_int (-1));
6965 case BUILT_IN_CHKP_BNDMK
:
6966 case BUILT_IN_CHKP_BNDSTX
:
6967 case BUILT_IN_CHKP_BNDCL
:
6968 case BUILT_IN_CHKP_BNDCU
:
6969 case BUILT_IN_CHKP_BNDLDX
:
6970 case BUILT_IN_CHKP_BNDRET
:
6971 case BUILT_IN_CHKP_INTERSECT
:
6972 case BUILT_IN_CHKP_NARROW
:
6973 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6974 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6975 /* Software implementation of Pointer Bounds Checker is NYI.
6976 Target support is required. */
6977 error ("Your target platform does not support -fcheck-pointer-bounds");
6980 case BUILT_IN_ACC_ON_DEVICE
:
6981 /* Do library call, if we failed to expand the builtin when
6985 default: /* just do library call, if unknown builtin */
6989 /* The switch statement above can drop through to cause the function
6990 to be called normally. */
6991 return expand_call (exp
, target
, ignore
);
6994 /* Similar to expand_builtin but is used for instrumented calls. */
6997 expand_builtin_with_bounds (tree exp
, rtx target
,
6998 rtx subtarget ATTRIBUTE_UNUSED
,
6999 machine_mode mode
, int ignore
)
7001 tree fndecl
= get_callee_fndecl (exp
);
7002 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7004 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
7006 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7007 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7009 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
7010 && fcode
< END_CHKP_BUILTINS
);
7014 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
7015 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7020 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7021 target
= expand_builtin_mempcpy_with_bounds (exp
, target
, mode
);
7026 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7027 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7036 /* The switch statement above can drop through to cause the function
7037 to be called normally. */
7038 return expand_call (exp
, target
, ignore
);
7041 /* Determine whether a tree node represents a call to a built-in
7042 function. If the tree T is a call to a built-in function with
7043 the right number of arguments of the appropriate types, return
7044 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7045 Otherwise the return value is END_BUILTINS. */
7047 enum built_in_function
7048 builtin_mathfn_code (const_tree t
)
7050 const_tree fndecl
, arg
, parmlist
;
7051 const_tree argtype
, parmtype
;
7052 const_call_expr_arg_iterator iter
;
7054 if (TREE_CODE (t
) != CALL_EXPR
7055 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7056 return END_BUILTINS
;
7058 fndecl
= get_callee_fndecl (t
);
7059 if (fndecl
== NULL_TREE
7060 || TREE_CODE (fndecl
) != FUNCTION_DECL
7061 || ! DECL_BUILT_IN (fndecl
)
7062 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7063 return END_BUILTINS
;
7065 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7066 init_const_call_expr_arg_iterator (t
, &iter
);
7067 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7069 /* If a function doesn't take a variable number of arguments,
7070 the last element in the list will have type `void'. */
7071 parmtype
= TREE_VALUE (parmlist
);
7072 if (VOID_TYPE_P (parmtype
))
7074 if (more_const_call_expr_args_p (&iter
))
7075 return END_BUILTINS
;
7076 return DECL_FUNCTION_CODE (fndecl
);
7079 if (! more_const_call_expr_args_p (&iter
))
7080 return END_BUILTINS
;
7082 arg
= next_const_call_expr_arg (&iter
);
7083 argtype
= TREE_TYPE (arg
);
7085 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7087 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7088 return END_BUILTINS
;
7090 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7092 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7093 return END_BUILTINS
;
7095 else if (POINTER_TYPE_P (parmtype
))
7097 if (! POINTER_TYPE_P (argtype
))
7098 return END_BUILTINS
;
7100 else if (INTEGRAL_TYPE_P (parmtype
))
7102 if (! INTEGRAL_TYPE_P (argtype
))
7103 return END_BUILTINS
;
7106 return END_BUILTINS
;
7109 /* Variable-length argument list. */
7110 return DECL_FUNCTION_CODE (fndecl
);
7113 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7114 evaluate to a constant. */
7117 fold_builtin_constant_p (tree arg
)
7119 /* We return 1 for a numeric type that's known to be a constant
7120 value at compile-time or for an aggregate type that's a
7121 literal constant. */
7124 /* If we know this is a constant, emit the constant of one. */
7125 if (CONSTANT_CLASS_P (arg
)
7126 || (TREE_CODE (arg
) == CONSTRUCTOR
7127 && TREE_CONSTANT (arg
)))
7128 return integer_one_node
;
7129 if (TREE_CODE (arg
) == ADDR_EXPR
)
7131 tree op
= TREE_OPERAND (arg
, 0);
7132 if (TREE_CODE (op
) == STRING_CST
7133 || (TREE_CODE (op
) == ARRAY_REF
7134 && integer_zerop (TREE_OPERAND (op
, 1))
7135 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7136 return integer_one_node
;
7139 /* If this expression has side effects, show we don't know it to be a
7140 constant. Likewise if it's a pointer or aggregate type since in
7141 those case we only want literals, since those are only optimized
7142 when generating RTL, not later.
7143 And finally, if we are compiling an initializer, not code, we
7144 need to return a definite result now; there's not going to be any
7145 more optimization done. */
7146 if (TREE_SIDE_EFFECTS (arg
)
7147 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7148 || POINTER_TYPE_P (TREE_TYPE (arg
))
7150 || folding_initializer
7151 || force_folding_builtin_constant_p
)
7152 return integer_zero_node
;
7157 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7158 return it as a truthvalue. */
7161 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7164 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7166 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7167 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7168 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7169 pred_type
= TREE_VALUE (arg_types
);
7170 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7172 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7173 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7174 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7177 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7178 build_int_cst (ret_type
, 0));
7181 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7182 NULL_TREE if no simplification is possible. */
7185 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7187 tree inner
, fndecl
, inner_arg0
;
7188 enum tree_code code
;
7190 /* Distribute the expected value over short-circuiting operators.
7191 See through the cast from truthvalue_type_node to long. */
7193 while (CONVERT_EXPR_P (inner_arg0
)
7194 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7195 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7196 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7198 /* If this is a builtin_expect within a builtin_expect keep the
7199 inner one. See through a comparison against a constant. It
7200 might have been added to create a thruthvalue. */
7203 if (COMPARISON_CLASS_P (inner
)
7204 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7205 inner
= TREE_OPERAND (inner
, 0);
7207 if (TREE_CODE (inner
) == CALL_EXPR
7208 && (fndecl
= get_callee_fndecl (inner
))
7209 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7210 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7214 code
= TREE_CODE (inner
);
7215 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7217 tree op0
= TREE_OPERAND (inner
, 0);
7218 tree op1
= TREE_OPERAND (inner
, 1);
7220 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7221 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7222 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7224 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7227 /* If the argument isn't invariant then there's nothing else we can do. */
7228 if (!TREE_CONSTANT (inner_arg0
))
7231 /* If we expect that a comparison against the argument will fold to
7232 a constant return the constant. In practice, this means a true
7233 constant or the address of a non-weak symbol. */
7236 if (TREE_CODE (inner
) == ADDR_EXPR
)
7240 inner
= TREE_OPERAND (inner
, 0);
7242 while (TREE_CODE (inner
) == COMPONENT_REF
7243 || TREE_CODE (inner
) == ARRAY_REF
);
7244 if ((TREE_CODE (inner
) == VAR_DECL
7245 || TREE_CODE (inner
) == FUNCTION_DECL
)
7246 && DECL_WEAK (inner
))
7250 /* Otherwise, ARG0 already has the proper type for the return value. */
7254 /* Fold a call to __builtin_classify_type with argument ARG. */
7257 fold_builtin_classify_type (tree arg
)
7260 return build_int_cst (integer_type_node
, no_type_class
);
7262 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7265 /* Fold a call to __builtin_strlen with argument ARG. */
7268 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7270 if (!validate_arg (arg
, POINTER_TYPE
))
7274 tree len
= c_strlen (arg
, 0);
7277 return fold_convert_loc (loc
, type
, len
);
7283 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7286 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7288 REAL_VALUE_TYPE real
;
7290 /* __builtin_inff is intended to be usable to define INFINITY on all
7291 targets. If an infinity is not available, INFINITY expands "to a
7292 positive constant of type float that overflows at translation
7293 time", footnote "In this case, using INFINITY will violate the
7294 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7295 Thus we pedwarn to ensure this constraint violation is
7297 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7298 pedwarn (loc
, 0, "target format does not support infinity");
7301 return build_real (type
, real
);
7304 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7307 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7309 REAL_VALUE_TYPE real
;
7312 if (!validate_arg (arg
, POINTER_TYPE
))
7314 str
= c_getstr (arg
);
7318 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7321 return build_real (type
, real
);
7324 /* Return true if the floating point expression T has an integer value.
7325 We also allow +Inf, -Inf and NaN to be considered integer values. */
7328 integer_valued_real_p (tree t
)
7330 switch (TREE_CODE (t
))
7337 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7342 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7349 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7350 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7353 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7354 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7357 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7361 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7362 if (TREE_CODE (type
) == INTEGER_TYPE
)
7364 if (TREE_CODE (type
) == REAL_TYPE
)
7365 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7370 switch (builtin_mathfn_code (t
))
7372 CASE_FLT_FN (BUILT_IN_CEIL
):
7373 CASE_FLT_FN (BUILT_IN_FLOOR
):
7374 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7375 CASE_FLT_FN (BUILT_IN_RINT
):
7376 CASE_FLT_FN (BUILT_IN_ROUND
):
7377 CASE_FLT_FN (BUILT_IN_TRUNC
):
7380 CASE_FLT_FN (BUILT_IN_FMIN
):
7381 CASE_FLT_FN (BUILT_IN_FMAX
):
7382 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7383 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7396 /* FNDECL is assumed to be a builtin where truncation can be propagated
7397 across (for instance floor((double)f) == (double)floorf (f).
7398 Do the transformation for a call with argument ARG. */
7401 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7403 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7405 if (!validate_arg (arg
, REAL_TYPE
))
7408 /* Integer rounding functions are idempotent. */
7409 if (fcode
== builtin_mathfn_code (arg
))
7412 /* If argument is already integer valued, and we don't need to worry
7413 about setting errno, there's no need to perform rounding. */
7414 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7419 tree arg0
= strip_float_extensions (arg
);
7420 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7421 tree newtype
= TREE_TYPE (arg0
);
7424 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7425 && (decl
= mathfn_built_in (newtype
, fcode
)))
7426 return fold_convert_loc (loc
, ftype
,
7427 build_call_expr_loc (loc
, decl
, 1,
7428 fold_convert_loc (loc
,
7435 /* FNDECL is assumed to be builtin which can narrow the FP type of
7436 the argument, for instance lround((double)f) -> lroundf (f).
7437 Do the transformation for a call with argument ARG. */
7440 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7442 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7444 if (!validate_arg (arg
, REAL_TYPE
))
7447 /* If argument is already integer valued, and we don't need to worry
7448 about setting errno, there's no need to perform rounding. */
7449 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7450 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7451 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7455 tree ftype
= TREE_TYPE (arg
);
7456 tree arg0
= strip_float_extensions (arg
);
7457 tree newtype
= TREE_TYPE (arg0
);
7460 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7461 && (decl
= mathfn_built_in (newtype
, fcode
)))
7462 return build_call_expr_loc (loc
, decl
, 1,
7463 fold_convert_loc (loc
, newtype
, arg0
));
7466 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7467 sizeof (int) == sizeof (long). */
7468 if (TYPE_PRECISION (integer_type_node
)
7469 == TYPE_PRECISION (long_integer_type_node
))
7471 tree newfn
= NULL_TREE
;
7474 CASE_FLT_FN (BUILT_IN_ICEIL
):
7475 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7478 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7479 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7482 CASE_FLT_FN (BUILT_IN_IROUND
):
7483 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7486 CASE_FLT_FN (BUILT_IN_IRINT
):
7487 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7496 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7497 return fold_convert_loc (loc
,
7498 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7502 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7503 sizeof (long long) == sizeof (long). */
7504 if (TYPE_PRECISION (long_long_integer_type_node
)
7505 == TYPE_PRECISION (long_integer_type_node
))
7507 tree newfn
= NULL_TREE
;
7510 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7511 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7514 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7515 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7518 CASE_FLT_FN (BUILT_IN_LLROUND
):
7519 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7522 CASE_FLT_FN (BUILT_IN_LLRINT
):
7523 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7532 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7533 return fold_convert_loc (loc
,
7534 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7541 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7542 complex tree type of the result. If NEG is true, the imaginary
7543 zero is negative. */
7546 build_complex_cproj (tree type
, bool neg
)
7548 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
7552 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
7553 build_real (TREE_TYPE (type
), rzero
));
7556 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7557 return type. Return NULL_TREE if no simplification can be made. */
7560 fold_builtin_cproj (location_t loc
, tree arg
, tree type
)
7562 if (!validate_arg (arg
, COMPLEX_TYPE
)
7563 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7566 /* If there are no infinities, return arg. */
7567 if (! HONOR_INFINITIES (type
))
7568 return non_lvalue_loc (loc
, arg
);
7570 /* Calculate the result when the argument is a constant. */
7571 if (TREE_CODE (arg
) == COMPLEX_CST
)
7573 const REAL_VALUE_TYPE
*real
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
7574 const REAL_VALUE_TYPE
*imag
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
7576 if (real_isinf (real
) || real_isinf (imag
))
7577 return build_complex_cproj (type
, imag
->sign
);
7585 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7586 Return NULL_TREE if no simplification can be made. */
7589 fold_builtin_tan (tree arg
, tree type
)
7591 enum built_in_function fcode
;
7594 if (!validate_arg (arg
, REAL_TYPE
))
7597 /* Calculate the result when the argument is a constant. */
7598 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7601 /* Optimize tan(atan(x)) = x. */
7602 fcode
= builtin_mathfn_code (arg
);
7603 if (flag_unsafe_math_optimizations
7604 && (fcode
== BUILT_IN_ATAN
7605 || fcode
== BUILT_IN_ATANF
7606 || fcode
== BUILT_IN_ATANL
))
7607 return CALL_EXPR_ARG (arg
, 0);
7612 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7613 NULL_TREE if no simplification can be made. */
7616 fold_builtin_sincos (location_t loc
,
7617 tree arg0
, tree arg1
, tree arg2
)
7622 if (!validate_arg (arg0
, REAL_TYPE
)
7623 || !validate_arg (arg1
, POINTER_TYPE
)
7624 || !validate_arg (arg2
, POINTER_TYPE
))
7627 type
= TREE_TYPE (arg0
);
7629 /* Calculate the result when the argument is a constant. */
7630 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7633 /* Canonicalize sincos to cexpi. */
7634 if (!targetm
.libc_has_function (function_c99_math_complex
))
7636 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7640 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7641 call
= builtin_save_expr (call
);
7643 return build2 (COMPOUND_EXPR
, void_type_node
,
7644 build2 (MODIFY_EXPR
, void_type_node
,
7645 build_fold_indirect_ref_loc (loc
, arg1
),
7646 build1 (IMAGPART_EXPR
, type
, call
)),
7647 build2 (MODIFY_EXPR
, void_type_node
,
7648 build_fold_indirect_ref_loc (loc
, arg2
),
7649 build1 (REALPART_EXPR
, type
, call
)));
7652 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7653 NULL_TREE if no simplification can be made. */
7656 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7659 tree realp
, imagp
, ifn
;
7662 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7663 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7666 /* Calculate the result when the argument is a constant. */
7667 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7670 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7672 /* In case we can figure out the real part of arg0 and it is constant zero
7674 if (!targetm
.libc_has_function (function_c99_math_complex
))
7676 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7680 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7681 && real_zerop (realp
))
7683 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7684 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7687 /* In case we can easily decompose real and imaginary parts split cexp
7688 to exp (r) * cexpi (i). */
7689 if (flag_unsafe_math_optimizations
7692 tree rfn
, rcall
, icall
;
7694 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7698 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7702 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7703 icall
= builtin_save_expr (icall
);
7704 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7705 rcall
= builtin_save_expr (rcall
);
7706 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7707 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7709 fold_build1_loc (loc
, REALPART_EXPR
,
7711 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7713 fold_build1_loc (loc
, IMAGPART_EXPR
,
7720 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7721 Return NULL_TREE if no simplification can be made. */
7724 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7726 if (!validate_arg (arg
, REAL_TYPE
))
7729 /* Optimize trunc of constant value. */
7730 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7732 REAL_VALUE_TYPE r
, x
;
7733 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7735 x
= TREE_REAL_CST (arg
);
7736 real_trunc (&r
, TYPE_MODE (type
), &x
);
7737 return build_real (type
, r
);
7740 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7743 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7744 Return NULL_TREE if no simplification can be made. */
7747 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7749 if (!validate_arg (arg
, REAL_TYPE
))
7752 /* Optimize floor of constant value. */
7753 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7757 x
= TREE_REAL_CST (arg
);
7758 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7760 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7763 real_floor (&r
, TYPE_MODE (type
), &x
);
7764 return build_real (type
, r
);
7768 /* Fold floor (x) where x is nonnegative to trunc (x). */
7769 if (tree_expr_nonnegative_p (arg
))
7771 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7773 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7776 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7779 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7780 Return NULL_TREE if no simplification can be made. */
7783 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7785 if (!validate_arg (arg
, REAL_TYPE
))
7788 /* Optimize ceil of constant value. */
7789 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7793 x
= TREE_REAL_CST (arg
);
7794 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7796 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7799 real_ceil (&r
, TYPE_MODE (type
), &x
);
7800 return build_real (type
, r
);
7804 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7807 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7808 Return NULL_TREE if no simplification can be made. */
7811 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7813 if (!validate_arg (arg
, REAL_TYPE
))
7816 /* Optimize round of constant value. */
7817 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7821 x
= TREE_REAL_CST (arg
);
7822 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7824 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7827 real_round (&r
, TYPE_MODE (type
), &x
);
7828 return build_real (type
, r
);
7832 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7835 /* Fold function call to builtin lround, lroundf or lroundl (or the
7836 corresponding long long versions) and other rounding functions. ARG
7837 is the argument to the call. Return NULL_TREE if no simplification
7841 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7843 if (!validate_arg (arg
, REAL_TYPE
))
7846 /* Optimize lround of constant value. */
7847 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7849 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7851 if (real_isfinite (&x
))
7853 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7854 tree ftype
= TREE_TYPE (arg
);
7858 switch (DECL_FUNCTION_CODE (fndecl
))
7860 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7861 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7862 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7863 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7866 CASE_FLT_FN (BUILT_IN_ICEIL
):
7867 CASE_FLT_FN (BUILT_IN_LCEIL
):
7868 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7869 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7872 CASE_FLT_FN (BUILT_IN_IROUND
):
7873 CASE_FLT_FN (BUILT_IN_LROUND
):
7874 CASE_FLT_FN (BUILT_IN_LLROUND
):
7875 real_round (&r
, TYPE_MODE (ftype
), &x
);
7882 wide_int val
= real_to_integer (&r
, &fail
, TYPE_PRECISION (itype
));
7884 return wide_int_to_tree (itype
, val
);
7888 switch (DECL_FUNCTION_CODE (fndecl
))
7890 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7891 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7892 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7893 if (tree_expr_nonnegative_p (arg
))
7894 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7895 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7900 return fold_fixed_mathfn (loc
, fndecl
, arg
);
7903 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7904 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7905 the argument to the call. Return NULL_TREE if no simplification can
7909 fold_builtin_bitop (tree fndecl
, tree arg
)
7911 if (!validate_arg (arg
, INTEGER_TYPE
))
7914 /* Optimize for constant argument. */
7915 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7917 tree type
= TREE_TYPE (arg
);
7920 switch (DECL_FUNCTION_CODE (fndecl
))
7922 CASE_INT_FN (BUILT_IN_FFS
):
7923 result
= wi::ffs (arg
);
7926 CASE_INT_FN (BUILT_IN_CLZ
):
7927 if (wi::ne_p (arg
, 0))
7928 result
= wi::clz (arg
);
7929 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7930 result
= TYPE_PRECISION (type
);
7933 CASE_INT_FN (BUILT_IN_CTZ
):
7934 if (wi::ne_p (arg
, 0))
7935 result
= wi::ctz (arg
);
7936 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7937 result
= TYPE_PRECISION (type
);
7940 CASE_INT_FN (BUILT_IN_CLRSB
):
7941 result
= wi::clrsb (arg
);
7944 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7945 result
= wi::popcount (arg
);
7948 CASE_INT_FN (BUILT_IN_PARITY
):
7949 result
= wi::parity (arg
);
7956 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7962 /* Fold function call to builtin_bswap and the short, long and long long
7963 variants. Return NULL_TREE if no simplification can be made. */
7965 fold_builtin_bswap (tree fndecl
, tree arg
)
7967 if (! validate_arg (arg
, INTEGER_TYPE
))
7970 /* Optimize constant value. */
7971 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7973 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7975 switch (DECL_FUNCTION_CODE (fndecl
))
7977 case BUILT_IN_BSWAP16
:
7978 case BUILT_IN_BSWAP32
:
7979 case BUILT_IN_BSWAP64
:
7981 signop sgn
= TYPE_SIGN (type
);
7983 wide_int_to_tree (type
,
7984 wide_int::from (arg
, TYPE_PRECISION (type
),
7996 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7997 NULL_TREE if no simplification can be made. */
8000 fold_builtin_hypot (location_t loc
, tree arg0
, tree arg1
, tree type
)
8004 if (!validate_arg (arg0
, REAL_TYPE
)
8005 || !validate_arg (arg1
, REAL_TYPE
))
8008 /* Calculate the result when the argument is a constant. */
8009 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8012 /* If either argument is zero, hypot is fabs of the other. */
8013 if (real_zerop (arg0
))
8014 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
8015 else if (real_zerop (arg1
))
8016 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
8018 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8019 if (flag_unsafe_math_optimizations
8020 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8021 return fold_build2_loc (loc
, MULT_EXPR
, type
,
8022 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
8023 build_real_truncate (type
, dconst_sqrt2 ()));
8029 /* Fold a builtin function call to pow, powf, or powl. Return
8030 NULL_TREE if no simplification can be made. */
8032 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
8036 if (!validate_arg (arg0
, REAL_TYPE
)
8037 || !validate_arg (arg1
, REAL_TYPE
))
8040 /* Calculate the result when the argument is a constant. */
8041 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8044 /* Optimize pow(1.0,y) = 1.0. */
8045 if (real_onep (arg0
))
8046 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8048 if (TREE_CODE (arg1
) == REAL_CST
8049 && !TREE_OVERFLOW (arg1
))
8051 REAL_VALUE_TYPE cint
;
8055 c
= TREE_REAL_CST (arg1
);
8057 /* Optimize pow(x,0.0) = 1.0. */
8058 if (real_equal (&c
, &dconst0
))
8059 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8062 /* Optimize pow(x,1.0) = x. */
8063 if (real_equal (&c
, &dconst1
))
8066 /* Optimize pow(x,-1.0) = 1.0/x. */
8067 if (real_equal (&c
, &dconstm1
))
8068 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8069 build_real (type
, dconst1
), arg0
);
8071 /* Optimize pow(x,0.5) = sqrt(x). */
8072 if (flag_unsafe_math_optimizations
8073 && real_equal (&c
, &dconsthalf
))
8075 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8077 if (sqrtfn
!= NULL_TREE
)
8078 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8081 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8082 if (flag_unsafe_math_optimizations
)
8084 const REAL_VALUE_TYPE dconstroot
8085 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8087 if (real_equal (&c
, &dconstroot
))
8089 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8090 if (cbrtfn
!= NULL_TREE
)
8091 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8095 /* Check for an integer exponent. */
8096 n
= real_to_integer (&c
);
8097 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8098 if (real_identical (&c
, &cint
))
8100 /* Attempt to evaluate pow at compile-time, unless this should
8101 raise an exception. */
8102 if (TREE_CODE (arg0
) == REAL_CST
8103 && !TREE_OVERFLOW (arg0
)
8105 || (!flag_trapping_math
&& !flag_errno_math
)
8106 || !real_equal (&TREE_REAL_CST (arg0
), &dconst0
)))
8111 x
= TREE_REAL_CST (arg0
);
8112 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8113 if (flag_unsafe_math_optimizations
|| !inexact
)
8114 return build_real (type
, x
);
8119 if (flag_unsafe_math_optimizations
)
8121 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8123 /* Optimize pow(expN(x),y) = expN(x*y). */
8124 if (BUILTIN_EXPONENT_P (fcode
))
8126 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8127 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8128 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8129 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8132 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8133 if (BUILTIN_SQRT_P (fcode
))
8135 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8136 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8137 build_real (type
, dconsthalf
));
8138 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8141 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8142 if (BUILTIN_CBRT_P (fcode
))
8144 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8145 if (tree_expr_nonnegative_p (arg
))
8147 tree c
= build_real_truncate (type
, dconst_third ());
8148 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
, c
);
8149 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8153 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8154 if (fcode
== BUILT_IN_POW
8155 || fcode
== BUILT_IN_POWF
8156 || fcode
== BUILT_IN_POWL
)
8158 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8159 if (tree_expr_nonnegative_p (arg00
))
8161 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8162 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8163 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8171 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8172 Return NULL_TREE if no simplification can be made. */
8174 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8175 tree arg0
, tree arg1
, tree type
)
8177 if (!validate_arg (arg0
, REAL_TYPE
)
8178 || !validate_arg (arg1
, INTEGER_TYPE
))
8181 /* Optimize pow(1.0,y) = 1.0. */
8182 if (real_onep (arg0
))
8183 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8185 if (tree_fits_shwi_p (arg1
))
8187 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8189 /* Evaluate powi at compile-time. */
8190 if (TREE_CODE (arg0
) == REAL_CST
8191 && !TREE_OVERFLOW (arg0
))
8194 x
= TREE_REAL_CST (arg0
);
8195 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8196 return build_real (type
, x
);
8199 /* Optimize pow(x,0) = 1.0. */
8201 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8204 /* Optimize pow(x,1) = x. */
8208 /* Optimize pow(x,-1) = 1.0/x. */
8210 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8211 build_real (type
, dconst1
), arg0
);
8217 /* A subroutine of fold_builtin to fold the various exponent
8218 functions. Return NULL_TREE if no simplification can be made.
8219 FUNC is the corresponding MPFR exponent function. */
8222 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8223 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8225 if (validate_arg (arg
, REAL_TYPE
))
8227 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8230 /* Calculate the result when the argument is a constant. */
8231 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8234 /* Optimize expN(logN(x)) = x. */
8235 if (flag_unsafe_math_optimizations
)
8237 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8239 if ((func
== mpfr_exp
8240 && (fcode
== BUILT_IN_LOG
8241 || fcode
== BUILT_IN_LOGF
8242 || fcode
== BUILT_IN_LOGL
))
8243 || (func
== mpfr_exp2
8244 && (fcode
== BUILT_IN_LOG2
8245 || fcode
== BUILT_IN_LOG2F
8246 || fcode
== BUILT_IN_LOG2L
))
8247 || (func
== mpfr_exp10
8248 && (fcode
== BUILT_IN_LOG10
8249 || fcode
== BUILT_IN_LOG10F
8250 || fcode
== BUILT_IN_LOG10L
)))
8251 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8258 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8259 arguments to the call, and TYPE is its return type.
8260 Return NULL_TREE if no simplification can be made. */
8263 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8265 if (!validate_arg (arg1
, POINTER_TYPE
)
8266 || !validate_arg (arg2
, INTEGER_TYPE
)
8267 || !validate_arg (len
, INTEGER_TYPE
))
8273 if (TREE_CODE (arg2
) != INTEGER_CST
8274 || !tree_fits_uhwi_p (len
))
8277 p1
= c_getstr (arg1
);
8278 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8284 if (target_char_cast (arg2
, &c
))
8287 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
8290 return build_int_cst (TREE_TYPE (arg1
), 0);
8292 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
8293 return fold_convert_loc (loc
, type
, tem
);
8299 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8300 Return NULL_TREE if no simplification can be made. */
8303 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8305 const char *p1
, *p2
;
8307 if (!validate_arg (arg1
, POINTER_TYPE
)
8308 || !validate_arg (arg2
, POINTER_TYPE
)
8309 || !validate_arg (len
, INTEGER_TYPE
))
8312 /* If the LEN parameter is zero, return zero. */
8313 if (integer_zerop (len
))
8314 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8317 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8318 if (operand_equal_p (arg1
, arg2
, 0))
8319 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8321 p1
= c_getstr (arg1
);
8322 p2
= c_getstr (arg2
);
8324 /* If all arguments are constant, and the value of len is not greater
8325 than the lengths of arg1 and arg2, evaluate at compile-time. */
8326 if (tree_fits_uhwi_p (len
) && p1
&& p2
8327 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8328 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8330 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
8333 return integer_one_node
;
8335 return integer_minus_one_node
;
8337 return integer_zero_node
;
8340 /* If len parameter is one, return an expression corresponding to
8341 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8342 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8344 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8345 tree cst_uchar_ptr_node
8346 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8349 = fold_convert_loc (loc
, integer_type_node
,
8350 build1 (INDIRECT_REF
, cst_uchar_node
,
8351 fold_convert_loc (loc
,
8355 = fold_convert_loc (loc
, integer_type_node
,
8356 build1 (INDIRECT_REF
, cst_uchar_node
,
8357 fold_convert_loc (loc
,
8360 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8366 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8367 Return NULL_TREE if no simplification can be made. */
8370 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8372 const char *p1
, *p2
;
8374 if (!validate_arg (arg1
, POINTER_TYPE
)
8375 || !validate_arg (arg2
, POINTER_TYPE
))
8378 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8379 if (operand_equal_p (arg1
, arg2
, 0))
8380 return integer_zero_node
;
8382 p1
= c_getstr (arg1
);
8383 p2
= c_getstr (arg2
);
8387 const int i
= strcmp (p1
, p2
);
8389 return integer_minus_one_node
;
8391 return integer_one_node
;
8393 return integer_zero_node
;
8396 /* If the second arg is "", return *(const unsigned char*)arg1. */
8397 if (p2
&& *p2
== '\0')
8399 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8400 tree cst_uchar_ptr_node
8401 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8403 return fold_convert_loc (loc
, integer_type_node
,
8404 build1 (INDIRECT_REF
, cst_uchar_node
,
8405 fold_convert_loc (loc
,
8410 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8411 if (p1
&& *p1
== '\0')
8413 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8414 tree cst_uchar_ptr_node
8415 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8418 = fold_convert_loc (loc
, integer_type_node
,
8419 build1 (INDIRECT_REF
, cst_uchar_node
,
8420 fold_convert_loc (loc
,
8423 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8429 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8430 Return NULL_TREE if no simplification can be made. */
8433 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8435 const char *p1
, *p2
;
8437 if (!validate_arg (arg1
, POINTER_TYPE
)
8438 || !validate_arg (arg2
, POINTER_TYPE
)
8439 || !validate_arg (len
, INTEGER_TYPE
))
8442 /* If the LEN parameter is zero, return zero. */
8443 if (integer_zerop (len
))
8444 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8447 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8448 if (operand_equal_p (arg1
, arg2
, 0))
8449 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8451 p1
= c_getstr (arg1
);
8452 p2
= c_getstr (arg2
);
8454 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
8456 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
8458 return integer_one_node
;
8460 return integer_minus_one_node
;
8462 return integer_zero_node
;
8465 /* If the second arg is "", and the length is greater than zero,
8466 return *(const unsigned char*)arg1. */
8467 if (p2
&& *p2
== '\0'
8468 && TREE_CODE (len
) == INTEGER_CST
8469 && tree_int_cst_sgn (len
) == 1)
8471 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8472 tree cst_uchar_ptr_node
8473 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8475 return fold_convert_loc (loc
, integer_type_node
,
8476 build1 (INDIRECT_REF
, cst_uchar_node
,
8477 fold_convert_loc (loc
,
8482 /* If the first arg is "", and the length is greater than zero,
8483 return -*(const unsigned char*)arg2. */
8484 if (p1
&& *p1
== '\0'
8485 && TREE_CODE (len
) == INTEGER_CST
8486 && tree_int_cst_sgn (len
) == 1)
8488 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8489 tree cst_uchar_ptr_node
8490 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8492 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8493 build1 (INDIRECT_REF
, cst_uchar_node
,
8494 fold_convert_loc (loc
,
8497 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8500 /* If len parameter is one, return an expression corresponding to
8501 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8502 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8504 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8505 tree cst_uchar_ptr_node
8506 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8508 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8509 build1 (INDIRECT_REF
, cst_uchar_node
,
8510 fold_convert_loc (loc
,
8513 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8514 build1 (INDIRECT_REF
, cst_uchar_node
,
8515 fold_convert_loc (loc
,
8518 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8524 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8525 ARG. Return NULL_TREE if no simplification can be made. */
8528 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
8530 if (!validate_arg (arg
, REAL_TYPE
))
8533 /* If ARG is a compile-time constant, determine the result. */
8534 if (TREE_CODE (arg
) == REAL_CST
8535 && !TREE_OVERFLOW (arg
))
8539 c
= TREE_REAL_CST (arg
);
8540 return (REAL_VALUE_NEGATIVE (c
)
8541 ? build_one_cst (type
)
8542 : build_zero_cst (type
));
8545 /* If ARG is non-negative, the result is always zero. */
8546 if (tree_expr_nonnegative_p (arg
))
8547 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8549 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8550 if (!HONOR_SIGNED_ZEROS (arg
))
8551 return fold_convert (type
,
8552 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
8553 build_real (TREE_TYPE (arg
), dconst0
)));
8558 /* Fold function call to builtin copysign, copysignf or copysignl with
8559 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8563 fold_builtin_copysign (location_t loc
, tree arg1
, tree arg2
, tree type
)
8565 if (!validate_arg (arg1
, REAL_TYPE
)
8566 || !validate_arg (arg2
, REAL_TYPE
))
8569 /* copysign(X,X) is X. */
8570 if (operand_equal_p (arg1
, arg2
, 0))
8571 return fold_convert_loc (loc
, type
, arg1
);
8573 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8574 if (TREE_CODE (arg1
) == REAL_CST
8575 && TREE_CODE (arg2
) == REAL_CST
8576 && !TREE_OVERFLOW (arg1
)
8577 && !TREE_OVERFLOW (arg2
))
8579 REAL_VALUE_TYPE c1
, c2
;
8581 c1
= TREE_REAL_CST (arg1
);
8582 c2
= TREE_REAL_CST (arg2
);
8583 /* c1.sign := c2.sign. */
8584 real_copysign (&c1
, &c2
);
8585 return build_real (type
, c1
);
8588 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8589 Remember to evaluate Y for side-effects. */
8590 if (tree_expr_nonnegative_p (arg2
))
8591 return omit_one_operand_loc (loc
, type
,
8592 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
8598 /* Fold a call to builtin isascii with argument ARG. */
8601 fold_builtin_isascii (location_t loc
, tree arg
)
8603 if (!validate_arg (arg
, INTEGER_TYPE
))
8607 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8608 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
8609 build_int_cst (integer_type_node
,
8610 ~ (unsigned HOST_WIDE_INT
) 0x7f));
8611 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
8612 arg
, integer_zero_node
);
8616 /* Fold a call to builtin toascii with argument ARG. */
8619 fold_builtin_toascii (location_t loc
, tree arg
)
8621 if (!validate_arg (arg
, INTEGER_TYPE
))
8624 /* Transform toascii(c) -> (c & 0x7f). */
8625 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
8626 build_int_cst (integer_type_node
, 0x7f));
8629 /* Fold a call to builtin isdigit with argument ARG. */
8632 fold_builtin_isdigit (location_t loc
, tree arg
)
8634 if (!validate_arg (arg
, INTEGER_TYPE
))
8638 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8639 /* According to the C standard, isdigit is unaffected by locale.
8640 However, it definitely is affected by the target character set. */
8641 unsigned HOST_WIDE_INT target_digit0
8642 = lang_hooks
.to_target_charset ('0');
8644 if (target_digit0
== 0)
8647 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
8648 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
8649 build_int_cst (unsigned_type_node
, target_digit0
));
8650 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
8651 build_int_cst (unsigned_type_node
, 9));
8655 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8658 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
8660 if (!validate_arg (arg
, REAL_TYPE
))
8663 arg
= fold_convert_loc (loc
, type
, arg
);
8664 if (TREE_CODE (arg
) == REAL_CST
)
8665 return fold_abs_const (arg
, type
);
8666 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8669 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8672 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
8674 if (!validate_arg (arg
, INTEGER_TYPE
))
8677 arg
= fold_convert_loc (loc
, type
, arg
);
8678 if (TREE_CODE (arg
) == INTEGER_CST
)
8679 return fold_abs_const (arg
, type
);
8680 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8683 /* Fold a fma operation with arguments ARG[012]. */
8686 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
8687 tree type
, tree arg0
, tree arg1
, tree arg2
)
8689 if (TREE_CODE (arg0
) == REAL_CST
8690 && TREE_CODE (arg1
) == REAL_CST
8691 && TREE_CODE (arg2
) == REAL_CST
)
8692 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
8697 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8700 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
8702 if (validate_arg (arg0
, REAL_TYPE
)
8703 && validate_arg (arg1
, REAL_TYPE
)
8704 && validate_arg (arg2
, REAL_TYPE
))
8706 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
8710 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8711 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
8712 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
8717 /* Fold a call to builtin fmin or fmax. */
8720 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
8721 tree type
, bool max
)
8723 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
8725 /* Calculate the result when the argument is a constant. */
8726 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
8731 /* If either argument is NaN, return the other one. Avoid the
8732 transformation if we get (and honor) a signalling NaN. Using
8733 omit_one_operand() ensures we create a non-lvalue. */
8734 if (TREE_CODE (arg0
) == REAL_CST
8735 && real_isnan (&TREE_REAL_CST (arg0
))
8736 && (! HONOR_SNANS (arg0
)
8737 || ! TREE_REAL_CST (arg0
).signalling
))
8738 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
8739 if (TREE_CODE (arg1
) == REAL_CST
8740 && real_isnan (&TREE_REAL_CST (arg1
))
8741 && (! HONOR_SNANS (arg1
)
8742 || ! TREE_REAL_CST (arg1
).signalling
))
8743 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8745 /* Transform fmin/fmax(x,x) -> x. */
8746 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8747 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8749 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8750 functions to return the numeric arg if the other one is NaN.
8751 These tree codes don't honor that, so only transform if
8752 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8753 handled, so we don't have to worry about it either. */
8754 if (flag_finite_math_only
)
8755 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
8756 fold_convert_loc (loc
, type
, arg0
),
8757 fold_convert_loc (loc
, type
, arg1
));
8762 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8765 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
8767 if (validate_arg (arg
, COMPLEX_TYPE
)
8768 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
8770 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
8774 tree new_arg
= builtin_save_expr (arg
);
8775 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
8776 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
8777 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
8784 /* Fold a call to builtin logb/ilogb. */
8787 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
8789 if (! validate_arg (arg
, REAL_TYPE
))
8794 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
8796 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
8802 /* If arg is Inf or NaN and we're logb, return it. */
8803 if (TREE_CODE (rettype
) == REAL_TYPE
)
8805 /* For logb(-Inf) we have to return +Inf. */
8806 if (real_isinf (value
) && real_isneg (value
))
8808 REAL_VALUE_TYPE tem
;
8810 return build_real (rettype
, tem
);
8812 return fold_convert_loc (loc
, rettype
, arg
);
8814 /* Fall through... */
8816 /* Zero may set errno and/or raise an exception for logb, also
8817 for ilogb we don't know FP_ILOGB0. */
8820 /* For normal numbers, proceed iff radix == 2. In GCC,
8821 normalized significands are in the range [0.5, 1.0). We
8822 want the exponent as if they were [1.0, 2.0) so get the
8823 exponent and subtract 1. */
8824 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
8825 return fold_convert_loc (loc
, rettype
,
8826 build_int_cst (integer_type_node
,
8827 REAL_EXP (value
)-1));
8835 /* Fold a call to builtin significand, if radix == 2. */
8838 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
8840 if (! validate_arg (arg
, REAL_TYPE
))
8845 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
8847 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
8854 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8855 return fold_convert_loc (loc
, rettype
, arg
);
8857 /* For normal numbers, proceed iff radix == 2. */
8858 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
8860 REAL_VALUE_TYPE result
= *value
;
8861 /* In GCC, normalized significands are in the range [0.5,
8862 1.0). We want them to be [1.0, 2.0) so set the
8864 SET_REAL_EXP (&result
, 1);
8865 return build_real (rettype
, result
);
/* NOTE(review): partial extraction; gaps in the embedded original line
   numbers indicate missing lines (braces, case labels, the final
   return paths).  Code left byte-identical; comments only.
   Purpose: constant-fold frexp(ARG0, ARG1) when ARG0 is a REAL_CST and
   ARG1 dereferences to int -- builds (*arg1 = exp, frac) as a
   COMPOUND_EXPR; GCC significands are already in [0.5, 1.0), which is
   exactly frexp's convention.  */
8874 /* Fold a call to builtin frexp, we can assume the base is 2. */
8877 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8879 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8884 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8887 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8889 /* Proceed if a valid pointer type was passed in. */
8890 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
8892 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8898 /* For +-0, return (*exp = 0, +-0). */
8899 exp
= integer_zero_node
;
8904 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8905 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
8908 /* Since the frexp function always expects base 2, and in
8909 GCC normalized significands are already in the range
8910 [0.5, 1.0), we have exactly what frexp wants. */
8911 REAL_VALUE_TYPE frac_rvt
= *value
;
8912 SET_REAL_EXP (&frac_rvt
, 0);
8913 frac
= build_real (rettype
, frac_rvt
);
8914 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
8921 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8922 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
8923 TREE_SIDE_EFFECTS (arg1
) = 1;
8924 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
/* NOTE(review): partial extraction; missing lines between the embedded
   original line numbers (braces, closing returns).  Code byte-identical.
   Purpose: constant-fold ldexp/scalbn/scalbln.  Returns ARG0 unchanged
   for 0/Inf/NaN or a zero adjustment; otherwise, when both arguments
   are constants (and radix is 2 unless LDEXP), evaluates via
   real_ldexp with an exponent-range sanity cap and checks that the
   result round-trips through the target mode without change.  */
8930 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8931 then we can assume the base is two. If it's false, then we have to
8932 check the mode of the TYPE parameter in certain cases. */
8935 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
8936 tree type
, bool ldexp
)
8938 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
8943 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8944 if (real_zerop (arg0
) || integer_zerop (arg1
)
8945 || (TREE_CODE (arg0
) == REAL_CST
8946 && !real_isfinite (&TREE_REAL_CST (arg0
))))
8947 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8949 /* If both arguments are constant, then try to evaluate it. */
8950 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
8951 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
8952 && tree_fits_shwi_p (arg1
))
8954 /* Bound the maximum adjustment to twice the range of the
8955 mode's valid exponents. Use abs to ensure the range is
8956 positive as a sanity check. */
8957 const long max_exp_adj
= 2 *
8958 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
8959 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
8961 /* Get the user-requested adjustment. */
8962 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
8964 /* The requested adjustment must be inside this range. This
8965 is a preliminary cap to avoid things like overflow, we
8966 may still fail to compute the result for other reasons. */
8967 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
8969 REAL_VALUE_TYPE initial_result
;
8971 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
8973 /* Ensure we didn't overflow. */
8974 if (! real_isinf (&initial_result
))
8976 const REAL_VALUE_TYPE trunc_result
8977 = real_value_truncate (TYPE_MODE (type
), initial_result
);
8979 /* Only proceed if the target mode can hold the
8981 if (real_equal (&initial_result
, &trunc_result
))
8982 return build_real (type
, trunc_result
);
/* NOTE(review): partial extraction; case labels and braces are missing
   between the embedded original line numbers.  Code byte-identical.
   Purpose: constant-fold modf(ARG0, ARG1) when ARG0 is a REAL_CST and
   *ARG1 matches the return type -- splits into trunc/frac via
   real_trunc and real_arithmetic, preserving the sign of zero for
   negative integral inputs, and emits (*arg1 = trunc, frac).  */
8991 /* Fold a call to builtin modf. */
8994 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8996 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9001 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9004 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
9006 /* Proceed if a valid pointer type was passed in. */
9007 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9009 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9010 REAL_VALUE_TYPE trunc
, frac
;
9016 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9017 trunc
= frac
= *value
;
9020 /* For +-Inf, return (*arg1 = arg0, +-0). */
9022 frac
.sign
= value
->sign
;
9026 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9027 real_trunc (&trunc
, VOIDmode
, value
);
9028 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9029 /* If the original number was negative and already
9030 integral, then the fractional part is -0.0. */
9031 if (value
->sign
&& frac
.cl
== rvc_zero
)
9032 frac
.sign
= value
->sign
;
9036 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9037 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
9038 build_real (rettype
, trunc
));
9039 TREE_SIDE_EFFECTS (arg1
) = 1;
9040 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
9041 build_real (rettype
, frac
));
/* NOTE(review): partial extraction; declarations (mode, r, result, buf),
   braces, breaks and the default/return tail are missing between the
   embedded original line numbers.  Code byte-identical; comments only.
   Purpose: generic expansion of interclass builtins (isinf/isfinite/
   isnormal) into comparisons against the mode's extreme values, used
   only when no RTL icode is available for the operation.  */
9047 /* Given a location LOC, an interclass builtin function decl FNDECL
9048 and its single argument ARG, return an folded expression computing
9049 the same, or NULL_TREE if we either couldn't or didn't want to fold
9050 (the latter happen if there's an RTL instruction available). */
9053 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
9057 if (!validate_arg (arg
, REAL_TYPE
))
9060 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9063 mode
= TYPE_MODE (TREE_TYPE (arg
));
9065 /* If there is no optab, try generic code. */
9066 switch (DECL_FUNCTION_CODE (fndecl
))
9070 CASE_FLT_FN (BUILT_IN_ISINF
):
9072 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9073 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9074 tree
const type
= TREE_TYPE (arg
);
9078 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9079 real_from_string (&r
, buf
);
9080 result
= build_call_expr (isgr_fn
, 2,
9081 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9082 build_real (type
, r
));
9085 CASE_FLT_FN (BUILT_IN_FINITE
):
9086 case BUILT_IN_ISFINITE
:
9088 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9089 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9090 tree
const type
= TREE_TYPE (arg
);
9094 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9095 real_from_string (&r
, buf
);
9096 result
= build_call_expr (isle_fn
, 2,
9097 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9098 build_real (type
, r
));
9099 /*result = fold_build2_loc (loc, UNGT_EXPR,
9100 TREE_TYPE (TREE_TYPE (fndecl)),
9101 fold_build1_loc (loc, ABS_EXPR, type, arg),
9102 build_real (type, r));
9103 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9104 TREE_TYPE (TREE_TYPE (fndecl)),
9108 case BUILT_IN_ISNORMAL
:
9110 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9111 islessequal(fabs(x),DBL_MAX). */
9112 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9113 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9114 tree
const type
= TREE_TYPE (arg
);
9115 REAL_VALUE_TYPE rmax
, rmin
;
9118 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9119 real_from_string (&rmax
, buf
);
9120 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9121 real_from_string (&rmin
, buf
);
9122 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9123 result
= build_call_expr (isle_fn
, 2, arg
,
9124 build_real (type
, rmax
));
9125 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9126 build_call_expr (isge_fn
, 2, arg
,
9127 build_real (type
, rmin
)));
/* NOTE(review): partial extraction; the declaration of r, braces,
   breaks, the ISINF_SIGN result assembly tail and the default case are
   missing between the embedded original line numbers.  Code
   byte-identical; comments only.
   Purpose: fold __builtin_isnan/isinf/isinf_sign/isfinite -- constant
   inputs fold to integer constants; modes that cannot hold NaN/Inf fold
   to the trivial answer via omit_one_operand_loc; isnan of a
   non-constant becomes x UNORDERED x.  */
9137 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9138 ARG is the argument for the call. */
9141 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9143 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9146 if (!validate_arg (arg
, REAL_TYPE
))
9149 switch (builtin_index
)
9151 case BUILT_IN_ISINF
:
9152 if (!HONOR_INFINITIES (arg
))
9153 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9155 if (TREE_CODE (arg
) == REAL_CST
)
9157 r
= TREE_REAL_CST (arg
);
9158 if (real_isinf (&r
))
9159 return real_compare (GT_EXPR
, &r
, &dconst0
)
9160 ? integer_one_node
: integer_minus_one_node
;
9162 return integer_zero_node
;
9167 case BUILT_IN_ISINF_SIGN
:
9169 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9170 /* In a boolean context, GCC will fold the inner COND_EXPR to
9171 1. So e.g. "if (isinf_sign(x))" would be folded to just
9172 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9173 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9174 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9175 tree tmp
= NULL_TREE
;
9177 arg
= builtin_save_expr (arg
);
9179 if (signbit_fn
&& isinf_fn
)
9181 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9182 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9184 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9185 signbit_call
, integer_zero_node
);
9186 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9187 isinf_call
, integer_zero_node
);
9189 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9190 integer_minus_one_node
, integer_one_node
);
9191 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9199 case BUILT_IN_ISFINITE
:
9200 if (!HONOR_NANS (arg
)
9201 && !HONOR_INFINITIES (arg
))
9202 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9204 if (TREE_CODE (arg
) == REAL_CST
)
9206 r
= TREE_REAL_CST (arg
);
9207 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9212 case BUILT_IN_ISNAN
:
9213 if (!HONOR_NANS (arg
))
9214 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9216 if (TREE_CODE (arg
) == REAL_CST
)
9218 r
= TREE_REAL_CST (arg
);
9219 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9222 arg
= builtin_save_expr (arg
);
9223 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
/* NOTE(review): partial extraction; missing lines include the local
   declarations (mode, r, buf), the nargs check, the fp_nan/fp_zero/arg
   assignments and the final return.  Code byte-identical; comments only.
   Purpose: expand __builtin_fpclassify into a chain of COND_EXPRs
   testing fabs(x) against 0, the smallest normal (0x1p(emin-1)) and Inf,
   guarded by HONOR_INFINITIES/HONOR_NANS for the mode.  */
9230 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9231 This builtin will generate code to return the appropriate floating
9232 point classification depending on the value of the floating point
9233 number passed in. The possible return values must be supplied as
9234 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9235 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9236 one floating point argument which is "type generic". */
9239 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9241 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9242 arg
, type
, res
, tmp
;
9247 /* Verify the required arguments in the original call. */
9249 || !validate_arg (args
[0], INTEGER_TYPE
)
9250 || !validate_arg (args
[1], INTEGER_TYPE
)
9251 || !validate_arg (args
[2], INTEGER_TYPE
)
9252 || !validate_arg (args
[3], INTEGER_TYPE
)
9253 || !validate_arg (args
[4], INTEGER_TYPE
)
9254 || !validate_arg (args
[5], REAL_TYPE
))
9258 fp_infinite
= args
[1];
9259 fp_normal
= args
[2];
9260 fp_subnormal
= args
[3];
9263 type
= TREE_TYPE (arg
);
9264 mode
= TYPE_MODE (type
);
9265 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9269 (fabs(x) == Inf ? FP_INFINITE :
9270 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9271 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9273 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9274 build_real (type
, dconst0
));
9275 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9276 tmp
, fp_zero
, fp_subnormal
);
9278 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9279 real_from_string (&r
, buf
);
9280 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9281 arg
, build_real (type
, r
));
9282 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9284 if (HONOR_INFINITIES (mode
))
9287 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9288 build_real (type
, r
));
9289 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9293 if (HONOR_NANS (mode
))
9295 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9296 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
/* NOTE(review): partial extraction; missing lines include the type0/type1
   declarations, the cmp_type assignments for the mixed real/integer
   branches and an early-exit check.  Code byte-identical; comments only.
   Purpose: fold isgreater/isless/... -- converts both operands to a
   common comparison type, then emits either UNORDERED_EXPR (for
   isunordered) or the negation of the opposite comparison, picking the
   unordered or ordered code depending on whether the mode honors NaNs.  */
9302 /* Fold a call to an unordered comparison function such as
9303 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9304 being called and ARG0 and ARG1 are the arguments for the call.
9305 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9306 the opposite of the desired result. UNORDERED_CODE is used
9307 for modes that can hold NaNs and ORDERED_CODE is used for
9311 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9312 enum tree_code unordered_code
,
9313 enum tree_code ordered_code
)
9315 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9316 enum tree_code code
;
9318 enum tree_code code0
, code1
;
9319 tree cmp_type
= NULL_TREE
;
9321 type0
= TREE_TYPE (arg0
);
9322 type1
= TREE_TYPE (arg1
);
9324 code0
= TREE_CODE (type0
);
9325 code1
= TREE_CODE (type1
);
9327 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9328 /* Choose the wider of two real types. */
9329 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9331 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9333 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9336 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9337 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9339 if (unordered_code
== UNORDERED_EXPR
)
9341 if (!HONOR_NANS (arg0
))
9342 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9343 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9346 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9347 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9348 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
/* NOTE(review): partial extraction; missing lines include the switch
   opener, break statements, the default case and the arguments on the
   build_call_expr_internal_loc continuation line.  Code byte-identical;
   comments only.
   Purpose: fold __builtin_{add,sub,mul}_overflow (and their typed
   variants) into IFN_*_OVERFLOW internal calls returning a complex
   value: real part = arithmetic result stored through ARG2, imaginary
   part = overflow flag converted to boolean and returned.  */
9351 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9352 arithmetics if it can never overflow, or into internal functions that
9353 return both result of arithmetics and overflowed boolean flag in
9354 a complex integer result, or some other check for overflow. */
9357 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9358 tree arg0
, tree arg1
, tree arg2
)
9360 enum internal_fn ifn
= IFN_LAST
;
9361 tree type
= TREE_TYPE (TREE_TYPE (arg2
));
9362 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9365 case BUILT_IN_ADD_OVERFLOW
:
9366 case BUILT_IN_SADD_OVERFLOW
:
9367 case BUILT_IN_SADDL_OVERFLOW
:
9368 case BUILT_IN_SADDLL_OVERFLOW
:
9369 case BUILT_IN_UADD_OVERFLOW
:
9370 case BUILT_IN_UADDL_OVERFLOW
:
9371 case BUILT_IN_UADDLL_OVERFLOW
:
9372 ifn
= IFN_ADD_OVERFLOW
;
9374 case BUILT_IN_SUB_OVERFLOW
:
9375 case BUILT_IN_SSUB_OVERFLOW
:
9376 case BUILT_IN_SSUBL_OVERFLOW
:
9377 case BUILT_IN_SSUBLL_OVERFLOW
:
9378 case BUILT_IN_USUB_OVERFLOW
:
9379 case BUILT_IN_USUBL_OVERFLOW
:
9380 case BUILT_IN_USUBLL_OVERFLOW
:
9381 ifn
= IFN_SUB_OVERFLOW
;
9383 case BUILT_IN_MUL_OVERFLOW
:
9384 case BUILT_IN_SMUL_OVERFLOW
:
9385 case BUILT_IN_SMULL_OVERFLOW
:
9386 case BUILT_IN_SMULLL_OVERFLOW
:
9387 case BUILT_IN_UMUL_OVERFLOW
:
9388 case BUILT_IN_UMULL_OVERFLOW
:
9389 case BUILT_IN_UMULLL_OVERFLOW
:
9390 ifn
= IFN_MUL_OVERFLOW
;
9395 tree ctype
= build_complex_type (type
);
9396 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
9398 tree tgt
= save_expr (call
);
9399 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9400 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9401 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9403 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9404 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
/* NOTE(review): partial extraction; the switch opener, breaks, default
   case and final return are missing.  Code byte-identical; comments only.
   Purpose: dispatch for zero-argument builtins -- folds inf/huge_val to
   an infinity constant and classify_type of no argument.  */
9407 /* Fold a call to built-in function FNDECL with 0 arguments.
9408 This function returns NULL_TREE if no simplification was possible. */
9411 fold_builtin_0 (location_t loc
, tree fndecl
)
9413 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9414 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9417 CASE_FLT_FN (BUILT_IN_INF
):
9418 case BUILT_IN_INFD32
:
9419 case BUILT_IN_INFD64
:
9420 case BUILT_IN_INFD128
:
9421 return fold_builtin_inf (loc
, type
, true);
9423 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9424 return fold_builtin_inf (loc
, type
, false);
9426 case BUILT_IN_CLASSIFY_TYPE
:
9427 return fold_builtin_classify_type (NULL_TREE
);
/* NOTE(review): partial extraction of the large one-argument dispatch
   switch; the switch opener, break statements, several return/argument
   continuation lines (e.g. the trailing arguments of the CABS, J0/J1
   and classify cases) and the default case are missing between the
   embedded original line numbers.  Code byte-identical; comments only.
   Purpose: dispatch table folding one-argument builtins -- constant
   evaluation through MPFR (do_mpfr_arg1) / MPC (do_mpc_arg1) for math
   functions, and delegation to the dedicated fold_builtin_* helpers for
   the rest (fabs, classify, rounding, bit ops, ...).  */
9435 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9436 This function returns NULL_TREE if no simplification was possible. */
9439 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9441 tree type
= TREE_TYPE (TREE_TYPE (fndecl
))
9442 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9445 case BUILT_IN_CONSTANT_P
:
9447 tree val
= fold_builtin_constant_p (arg0
);
9449 /* Gimplification will pull the CALL_EXPR for the builtin out of
9450 an if condition. When not optimizing, we'll not CSE it back.
9451 To avoid link error types of regressions, return false now. */
9452 if (!val
&& !optimize
)
9453 val
= integer_zero_node
;
9458 case BUILT_IN_CLASSIFY_TYPE
:
9459 return fold_builtin_classify_type (arg0
);
9461 case BUILT_IN_STRLEN
:
9462 return fold_builtin_strlen (loc
, type
, arg0
);
9464 CASE_FLT_FN (BUILT_IN_FABS
):
9465 case BUILT_IN_FABSD32
:
9466 case BUILT_IN_FABSD64
:
9467 case BUILT_IN_FABSD128
:
9468 return fold_builtin_fabs (loc
, arg0
, type
);
9472 case BUILT_IN_LLABS
:
9473 case BUILT_IN_IMAXABS
:
9474 return fold_builtin_abs (loc
, arg0
, type
);
9476 CASE_FLT_FN (BUILT_IN_CONJ
):
9477 if (validate_arg (arg0
, COMPLEX_TYPE
)
9478 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9479 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9482 CASE_FLT_FN (BUILT_IN_CREAL
):
9483 if (validate_arg (arg0
, COMPLEX_TYPE
)
9484 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9485 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9488 CASE_FLT_FN (BUILT_IN_CIMAG
):
9489 if (validate_arg (arg0
, COMPLEX_TYPE
)
9490 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9491 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9494 CASE_FLT_FN (BUILT_IN_CCOS
):
9495 if (validate_arg (arg0
, COMPLEX_TYPE
)
9496 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9497 return do_mpc_arg1 (arg0
, type
, mpc_cos
);
9500 CASE_FLT_FN (BUILT_IN_CCOSH
):
9501 if (validate_arg (arg0
, COMPLEX_TYPE
)
9502 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9503 return do_mpc_arg1 (arg0
, type
, mpc_cosh
);
9506 CASE_FLT_FN (BUILT_IN_CPROJ
):
9507 return fold_builtin_cproj (loc
, arg0
, type
);
9509 CASE_FLT_FN (BUILT_IN_CSIN
):
9510 if (validate_arg (arg0
, COMPLEX_TYPE
)
9511 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9512 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
9515 CASE_FLT_FN (BUILT_IN_CSINH
):
9516 if (validate_arg (arg0
, COMPLEX_TYPE
)
9517 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9518 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
9521 CASE_FLT_FN (BUILT_IN_CTAN
):
9522 if (validate_arg (arg0
, COMPLEX_TYPE
)
9523 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9524 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
9527 CASE_FLT_FN (BUILT_IN_CTANH
):
9528 if (validate_arg (arg0
, COMPLEX_TYPE
)
9529 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9530 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
9533 CASE_FLT_FN (BUILT_IN_CLOG
):
9534 if (validate_arg (arg0
, COMPLEX_TYPE
)
9535 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9536 return do_mpc_arg1 (arg0
, type
, mpc_log
);
9539 CASE_FLT_FN (BUILT_IN_CSQRT
):
9540 if (validate_arg (arg0
, COMPLEX_TYPE
)
9541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9542 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
9545 CASE_FLT_FN (BUILT_IN_CASIN
):
9546 if (validate_arg (arg0
, COMPLEX_TYPE
)
9547 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9548 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
9551 CASE_FLT_FN (BUILT_IN_CACOS
):
9552 if (validate_arg (arg0
, COMPLEX_TYPE
)
9553 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9554 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
9557 CASE_FLT_FN (BUILT_IN_CATAN
):
9558 if (validate_arg (arg0
, COMPLEX_TYPE
)
9559 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9560 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
9563 CASE_FLT_FN (BUILT_IN_CASINH
):
9564 if (validate_arg (arg0
, COMPLEX_TYPE
)
9565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9566 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
9569 CASE_FLT_FN (BUILT_IN_CACOSH
):
9570 if (validate_arg (arg0
, COMPLEX_TYPE
)
9571 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9572 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
9575 CASE_FLT_FN (BUILT_IN_CATANH
):
9576 if (validate_arg (arg0
, COMPLEX_TYPE
)
9577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9578 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
9581 CASE_FLT_FN (BUILT_IN_CABS
):
9582 if (TREE_CODE (arg0
) == COMPLEX_CST
9583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9584 return do_mpfr_arg2 (TREE_REALPART (arg0
), TREE_IMAGPART (arg0
),
9588 CASE_FLT_FN (BUILT_IN_CARG
):
9589 return fold_builtin_carg (loc
, arg0
, type
);
9591 CASE_FLT_FN (BUILT_IN_SQRT
):
9592 if (validate_arg (arg0
, REAL_TYPE
))
9593 return do_mpfr_arg1 (arg0
, type
, mpfr_sqrt
, &dconst0
, NULL
, true);
9596 CASE_FLT_FN (BUILT_IN_CBRT
):
9597 if (validate_arg (arg0
, REAL_TYPE
))
9598 return do_mpfr_arg1 (arg0
, type
, mpfr_cbrt
, NULL
, NULL
, 0);
9601 CASE_FLT_FN (BUILT_IN_ASIN
):
9602 if (validate_arg (arg0
, REAL_TYPE
))
9603 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9604 &dconstm1
, &dconst1
, true);
9607 CASE_FLT_FN (BUILT_IN_ACOS
):
9608 if (validate_arg (arg0
, REAL_TYPE
))
9609 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9610 &dconstm1
, &dconst1
, true);
9613 CASE_FLT_FN (BUILT_IN_ATAN
):
9614 if (validate_arg (arg0
, REAL_TYPE
))
9615 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9618 CASE_FLT_FN (BUILT_IN_ASINH
):
9619 if (validate_arg (arg0
, REAL_TYPE
))
9620 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9623 CASE_FLT_FN (BUILT_IN_ACOSH
):
9624 if (validate_arg (arg0
, REAL_TYPE
))
9625 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9626 &dconst1
, NULL
, true);
9629 CASE_FLT_FN (BUILT_IN_ATANH
):
9630 if (validate_arg (arg0
, REAL_TYPE
))
9631 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9632 &dconstm1
, &dconst1
, false);
9635 CASE_FLT_FN (BUILT_IN_SIN
):
9636 if (validate_arg (arg0
, REAL_TYPE
))
9637 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9640 CASE_FLT_FN (BUILT_IN_COS
):
9641 if (validate_arg (arg0
, REAL_TYPE
))
9642 return do_mpfr_arg1 (arg0
, type
, mpfr_cos
, NULL
, NULL
, 0);
9645 CASE_FLT_FN (BUILT_IN_TAN
):
9646 return fold_builtin_tan (arg0
, type
);
9648 CASE_FLT_FN (BUILT_IN_CEXP
):
9649 return fold_builtin_cexp (loc
, arg0
, type
);
9651 CASE_FLT_FN (BUILT_IN_CEXPI
):
9652 if (validate_arg (arg0
, REAL_TYPE
))
9653 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9656 CASE_FLT_FN (BUILT_IN_SINH
):
9657 if (validate_arg (arg0
, REAL_TYPE
))
9658 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9661 CASE_FLT_FN (BUILT_IN_COSH
):
9662 if (validate_arg (arg0
, REAL_TYPE
))
9663 return do_mpfr_arg1 (arg0
, type
, mpfr_cosh
, NULL
, NULL
, 0);
9666 CASE_FLT_FN (BUILT_IN_TANH
):
9667 if (validate_arg (arg0
, REAL_TYPE
))
9668 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9671 CASE_FLT_FN (BUILT_IN_ERF
):
9672 if (validate_arg (arg0
, REAL_TYPE
))
9673 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9676 CASE_FLT_FN (BUILT_IN_ERFC
):
9677 if (validate_arg (arg0
, REAL_TYPE
))
9678 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9681 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9682 if (validate_arg (arg0
, REAL_TYPE
))
9683 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9686 CASE_FLT_FN (BUILT_IN_EXP
):
9687 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
9689 CASE_FLT_FN (BUILT_IN_EXP2
):
9690 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
9692 CASE_FLT_FN (BUILT_IN_EXP10
):
9693 CASE_FLT_FN (BUILT_IN_POW10
):
9694 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
9696 CASE_FLT_FN (BUILT_IN_EXPM1
):
9697 if (validate_arg (arg0
, REAL_TYPE
))
9698 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9701 CASE_FLT_FN (BUILT_IN_LOG
):
9702 if (validate_arg (arg0
, REAL_TYPE
))
9703 return do_mpfr_arg1 (arg0
, type
, mpfr_log
, &dconst0
, NULL
, false);
9706 CASE_FLT_FN (BUILT_IN_LOG2
):
9707 if (validate_arg (arg0
, REAL_TYPE
))
9708 return do_mpfr_arg1 (arg0
, type
, mpfr_log2
, &dconst0
, NULL
, false);
9711 CASE_FLT_FN (BUILT_IN_LOG10
):
9712 if (validate_arg (arg0
, REAL_TYPE
))
9713 return do_mpfr_arg1 (arg0
, type
, mpfr_log10
, &dconst0
, NULL
, false);
9716 CASE_FLT_FN (BUILT_IN_LOG1P
):
9717 if (validate_arg (arg0
, REAL_TYPE
))
9718 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9719 &dconstm1
, NULL
, false);
9722 CASE_FLT_FN (BUILT_IN_J0
):
9723 if (validate_arg (arg0
, REAL_TYPE
))
9724 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
9728 CASE_FLT_FN (BUILT_IN_J1
):
9729 if (validate_arg (arg0
, REAL_TYPE
))
9730 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
9734 CASE_FLT_FN (BUILT_IN_Y0
):
9735 if (validate_arg (arg0
, REAL_TYPE
))
9736 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
9737 &dconst0
, NULL
, false);
9740 CASE_FLT_FN (BUILT_IN_Y1
):
9741 if (validate_arg (arg0
, REAL_TYPE
))
9742 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
9743 &dconst0
, NULL
, false);
9746 CASE_FLT_FN (BUILT_IN_NAN
):
9747 case BUILT_IN_NAND32
:
9748 case BUILT_IN_NAND64
:
9749 case BUILT_IN_NAND128
:
9750 return fold_builtin_nan (arg0
, type
, true);
9752 CASE_FLT_FN (BUILT_IN_NANS
):
9753 return fold_builtin_nan (arg0
, type
, false);
9755 CASE_FLT_FN (BUILT_IN_FLOOR
):
9756 return fold_builtin_floor (loc
, fndecl
, arg0
);
9758 CASE_FLT_FN (BUILT_IN_CEIL
):
9759 return fold_builtin_ceil (loc
, fndecl
, arg0
);
9761 CASE_FLT_FN (BUILT_IN_TRUNC
):
9762 return fold_builtin_trunc (loc
, fndecl
, arg0
);
9764 CASE_FLT_FN (BUILT_IN_ROUND
):
9765 return fold_builtin_round (loc
, fndecl
, arg0
);
9767 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9768 CASE_FLT_FN (BUILT_IN_RINT
):
9769 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
9771 CASE_FLT_FN (BUILT_IN_ICEIL
):
9772 CASE_FLT_FN (BUILT_IN_LCEIL
):
9773 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9774 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9775 CASE_FLT_FN (BUILT_IN_IFLOOR
):
9776 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9777 CASE_FLT_FN (BUILT_IN_IROUND
):
9778 CASE_FLT_FN (BUILT_IN_LROUND
):
9779 CASE_FLT_FN (BUILT_IN_LLROUND
):
9780 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
9782 CASE_FLT_FN (BUILT_IN_IRINT
):
9783 CASE_FLT_FN (BUILT_IN_LRINT
):
9784 CASE_FLT_FN (BUILT_IN_LLRINT
):
9785 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
9787 case BUILT_IN_BSWAP16
:
9788 case BUILT_IN_BSWAP32
:
9789 case BUILT_IN_BSWAP64
:
9790 return fold_builtin_bswap (fndecl
, arg0
);
9792 CASE_INT_FN (BUILT_IN_FFS
):
9793 CASE_INT_FN (BUILT_IN_CLZ
):
9794 CASE_INT_FN (BUILT_IN_CTZ
):
9795 CASE_INT_FN (BUILT_IN_CLRSB
):
9796 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9797 CASE_INT_FN (BUILT_IN_PARITY
):
9798 return fold_builtin_bitop (fndecl
, arg0
);
9800 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9801 return fold_builtin_signbit (loc
, arg0
, type
);
9803 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9804 return fold_builtin_significand (loc
, arg0
, type
);
9806 CASE_FLT_FN (BUILT_IN_ILOGB
):
9807 CASE_FLT_FN (BUILT_IN_LOGB
):
9808 return fold_builtin_logb (loc
, arg0
, type
);
9810 case BUILT_IN_ISASCII
:
9811 return fold_builtin_isascii (loc
, arg0
);
9813 case BUILT_IN_TOASCII
:
9814 return fold_builtin_toascii (loc
, arg0
);
9816 case BUILT_IN_ISDIGIT
:
9817 return fold_builtin_isdigit (loc
, arg0
);
9819 CASE_FLT_FN (BUILT_IN_FINITE
):
9820 case BUILT_IN_FINITED32
:
9821 case BUILT_IN_FINITED64
:
9822 case BUILT_IN_FINITED128
:
9823 case BUILT_IN_ISFINITE
:
9825 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
9828 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9831 CASE_FLT_FN (BUILT_IN_ISINF
):
9832 case BUILT_IN_ISINFD32
:
9833 case BUILT_IN_ISINFD64
:
9834 case BUILT_IN_ISINFD128
:
9836 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9839 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9842 case BUILT_IN_ISNORMAL
:
9843 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9845 case BUILT_IN_ISINF_SIGN
:
9846 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9848 CASE_FLT_FN (BUILT_IN_ISNAN
):
9849 case BUILT_IN_ISNAND32
:
9850 case BUILT_IN_ISNAND64
:
9851 case BUILT_IN_ISNAND128
:
9852 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9855 if (integer_zerop (arg0
))
9856 return build_empty_stmt (loc
);
/* NOTE(review): partial extraction of the two-argument dispatch switch;
   the switch opener, break statements, trailing arguments of the YN and
   ISUNORDERED cases, the default case and the final return are missing
   between the embedded original line numbers.  Code byte-identical;
   comments only.
   Purpose: dispatch table folding two-argument builtins -- MPFR/MPC
   constant evaluation for math functions (jn/yn, remainder, atan2,
   fdim, cpow, ...), delegation to fold_builtin_* helpers (ldexp/scalbn,
   frexp, modf, string functions, pow, copysign, fmin/fmax), and
   fold_builtin_unordered_cmp for the is{greater,less,...} family.  */
9867 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9868 This function returns NULL_TREE if no simplification was possible. */
9871 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
9873 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9874 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9878 CASE_FLT_FN (BUILT_IN_JN
):
9879 if (validate_arg (arg0
, INTEGER_TYPE
)
9880 && validate_arg (arg1
, REAL_TYPE
))
9881 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
9884 CASE_FLT_FN (BUILT_IN_YN
):
9885 if (validate_arg (arg0
, INTEGER_TYPE
)
9886 && validate_arg (arg1
, REAL_TYPE
))
9887 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
9891 CASE_FLT_FN (BUILT_IN_DREM
):
9892 CASE_FLT_FN (BUILT_IN_REMAINDER
):
9893 if (validate_arg (arg0
, REAL_TYPE
)
9894 && validate_arg (arg1
, REAL_TYPE
))
9895 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
9898 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9899 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9900 if (validate_arg (arg0
, REAL_TYPE
)
9901 && validate_arg (arg1
, POINTER_TYPE
))
9902 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9905 CASE_FLT_FN (BUILT_IN_ATAN2
):
9906 if (validate_arg (arg0
, REAL_TYPE
)
9907 && validate_arg (arg1
, REAL_TYPE
))
9908 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9911 CASE_FLT_FN (BUILT_IN_FDIM
):
9912 if (validate_arg (arg0
, REAL_TYPE
)
9913 && validate_arg (arg1
, REAL_TYPE
))
9914 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9917 CASE_FLT_FN (BUILT_IN_HYPOT
):
9918 return fold_builtin_hypot (loc
, arg0
, arg1
, type
);
9920 CASE_FLT_FN (BUILT_IN_CPOW
):
9921 if (validate_arg (arg0
, COMPLEX_TYPE
)
9922 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
9923 && validate_arg (arg1
, COMPLEX_TYPE
)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
9925 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
9928 CASE_FLT_FN (BUILT_IN_LDEXP
):
9929 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
9930 CASE_FLT_FN (BUILT_IN_SCALBN
):
9931 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9932 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
9933 type
, /*ldexp=*/false);
9935 CASE_FLT_FN (BUILT_IN_FREXP
):
9936 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9938 CASE_FLT_FN (BUILT_IN_MODF
):
9939 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9941 case BUILT_IN_STRSTR
:
9942 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
9944 case BUILT_IN_STRSPN
:
9945 return fold_builtin_strspn (loc
, arg0
, arg1
);
9947 case BUILT_IN_STRCSPN
:
9948 return fold_builtin_strcspn (loc
, arg0
, arg1
);
9950 case BUILT_IN_STRCHR
:
9951 case BUILT_IN_INDEX
:
9952 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
9954 case BUILT_IN_STRRCHR
:
9955 case BUILT_IN_RINDEX
:
9956 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
9958 case BUILT_IN_STRCMP
:
9959 return fold_builtin_strcmp (loc
, arg0
, arg1
);
9961 case BUILT_IN_STRPBRK
:
9962 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
9964 case BUILT_IN_EXPECT
:
9965 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
9967 CASE_FLT_FN (BUILT_IN_POW
):
9968 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
9970 CASE_FLT_FN (BUILT_IN_POWI
):
9971 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
9973 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
9974 return fold_builtin_copysign (loc
, arg0
, arg1
, type
);
9976 CASE_FLT_FN (BUILT_IN_FMIN
):
9977 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
9979 CASE_FLT_FN (BUILT_IN_FMAX
):
9980 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
9982 case BUILT_IN_ISGREATER
:
9983 return fold_builtin_unordered_cmp (loc
, fndecl
,
9984 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9985 case BUILT_IN_ISGREATEREQUAL
:
9986 return fold_builtin_unordered_cmp (loc
, fndecl
,
9987 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9988 case BUILT_IN_ISLESS
:
9989 return fold_builtin_unordered_cmp (loc
, fndecl
,
9990 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9991 case BUILT_IN_ISLESSEQUAL
:
9992 return fold_builtin_unordered_cmp (loc
, fndecl
,
9993 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9994 case BUILT_IN_ISLESSGREATER
:
9995 return fold_builtin_unordered_cmp (loc
, fndecl
,
9996 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9997 case BUILT_IN_ISUNORDERED
:
9998 return fold_builtin_unordered_cmp (loc
, fndecl
,
9999 arg0
, arg1
, UNORDERED_EXPR
,
10002 /* We do the folding for va_start in the expander. */
10003 case BUILT_IN_VA_START
:
10006 case BUILT_IN_OBJECT_SIZE
:
10007 return fold_builtin_object_size (arg0
, arg1
);
10009 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
10010 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
10012 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
10013 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
10021 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10023 This function returns NULL_TREE if no simplification was possible. */
10026 fold_builtin_3 (location_t loc
, tree fndecl
,
10027 tree arg0
, tree arg1
, tree arg2
)
10029 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10030 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10034 CASE_FLT_FN (BUILT_IN_SINCOS
):
10035 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
10037 CASE_FLT_FN (BUILT_IN_FMA
):
10038 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
10041 CASE_FLT_FN (BUILT_IN_REMQUO
):
10042 if (validate_arg (arg0
, REAL_TYPE
)
10043 && validate_arg (arg1
, REAL_TYPE
)
10044 && validate_arg (arg2
, POINTER_TYPE
))
10045 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10048 case BUILT_IN_STRNCMP
:
10049 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
10051 case BUILT_IN_MEMCHR
:
10052 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
10054 case BUILT_IN_BCMP
:
10055 case BUILT_IN_MEMCMP
:
10056 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10058 case BUILT_IN_EXPECT
:
10059 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
10061 case BUILT_IN_ADD_OVERFLOW
:
10062 case BUILT_IN_SUB_OVERFLOW
:
10063 case BUILT_IN_MUL_OVERFLOW
:
10064 case BUILT_IN_SADD_OVERFLOW
:
10065 case BUILT_IN_SADDL_OVERFLOW
:
10066 case BUILT_IN_SADDLL_OVERFLOW
:
10067 case BUILT_IN_SSUB_OVERFLOW
:
10068 case BUILT_IN_SSUBL_OVERFLOW
:
10069 case BUILT_IN_SSUBLL_OVERFLOW
:
10070 case BUILT_IN_SMUL_OVERFLOW
:
10071 case BUILT_IN_SMULL_OVERFLOW
:
10072 case BUILT_IN_SMULLL_OVERFLOW
:
10073 case BUILT_IN_UADD_OVERFLOW
:
10074 case BUILT_IN_UADDL_OVERFLOW
:
10075 case BUILT_IN_UADDLL_OVERFLOW
:
10076 case BUILT_IN_USUB_OVERFLOW
:
10077 case BUILT_IN_USUBL_OVERFLOW
:
10078 case BUILT_IN_USUBLL_OVERFLOW
:
10079 case BUILT_IN_UMUL_OVERFLOW
:
10080 case BUILT_IN_UMULL_OVERFLOW
:
10081 case BUILT_IN_UMULLL_OVERFLOW
:
10082 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
10090 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10091 arguments. IGNORE is true if the result of the
10092 function call is ignored. This function returns NULL_TREE if no
10093 simplification was possible. */
10096 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
10098 tree ret
= NULL_TREE
;
10103 ret
= fold_builtin_0 (loc
, fndecl
);
10106 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
10109 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
10112 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
10115 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
10120 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10121 SET_EXPR_LOCATION (ret
, loc
);
10122 TREE_NO_WARNING (ret
) = 1;
10128 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10129 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10130 of arguments in ARGS to be omitted. OLDNARGS is the number of
10131 elements in ARGS. */
10134 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10135 int skip
, tree fndecl
, int n
, va_list newargs
)
10137 int nargs
= oldnargs
- skip
+ n
;
10144 buffer
= XALLOCAVEC (tree
, nargs
);
10145 for (i
= 0; i
< n
; i
++)
10146 buffer
[i
] = va_arg (newargs
, tree
);
10147 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10148 buffer
[i
] = args
[j
];
10151 buffer
= args
+ skip
;
10153 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
10156 /* Return true if FNDECL shouldn't be folded right now.
10157 If a built-in function has an inline attribute always_inline
10158 wrapper, defer folding it after always_inline functions have
10159 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10160 might not be performed. */
10163 avoid_folding_inline_builtin (tree fndecl
)
10165 return (DECL_DECLARED_INLINE_P (fndecl
)
10166 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10168 && !cfun
->always_inline_functions_inlined
10169 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
10172 /* A wrapper function for builtin folding that prevents warnings for
10173 "statement without effect" and the like, caused by removing the
10174 call node earlier than the warning is generated. */
10177 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10179 tree ret
= NULL_TREE
;
10180 tree fndecl
= get_callee_fndecl (exp
);
10182 && TREE_CODE (fndecl
) == FUNCTION_DECL
10183 && DECL_BUILT_IN (fndecl
)
10184 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10185 yet. Defer folding until we see all the arguments
10186 (after inlining). */
10187 && !CALL_EXPR_VA_ARG_PACK (exp
))
10189 int nargs
= call_expr_nargs (exp
);
10191 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10192 instead last argument is __builtin_va_arg_pack (). Defer folding
10193 even in that case, until arguments are finalized. */
10194 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10196 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10198 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10199 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10200 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10204 if (avoid_folding_inline_builtin (fndecl
))
10207 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10208 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10209 CALL_EXPR_ARGP (exp
), ignore
);
10212 tree
*args
= CALL_EXPR_ARGP (exp
);
10213 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10221 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10222 N arguments are passed in the array ARGARRAY. Return a folded
10223 expression or NULL_TREE if no simplification was possible. */
10226 fold_builtin_call_array (location_t loc
, tree
,
10231 if (TREE_CODE (fn
) != ADDR_EXPR
)
10234 tree fndecl
= TREE_OPERAND (fn
, 0);
10235 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10236 && DECL_BUILT_IN (fndecl
))
10238 /* If last argument is __builtin_va_arg_pack (), arguments to this
10239 function are not finalized yet. Defer folding until they are. */
10240 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10242 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10244 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10245 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10246 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10249 if (avoid_folding_inline_builtin (fndecl
))
10251 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10252 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10254 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
10260 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10261 along with N new arguments specified as the "..." parameters. SKIP
10262 is the number of arguments in EXP to be omitted. This function is used
10263 to do varargs-to-varargs transformations. */
10266 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10272 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
10273 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
10279 /* Validate a single argument ARG against a tree code CODE representing
10283 validate_arg (const_tree arg
, enum tree_code code
)
10287 else if (code
== POINTER_TYPE
)
10288 return POINTER_TYPE_P (TREE_TYPE (arg
));
10289 else if (code
== INTEGER_TYPE
)
10290 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10291 return code
== TREE_CODE (TREE_TYPE (arg
));
10294 /* This function validates the types of a function call argument list
10295 against a specified list of tree_codes. If the last specifier is a 0,
10296 that represents an ellipses, otherwise the last specifier must be a
10299 This is the GIMPLE version of validate_arglist. Eventually we want to
10300 completely convert builtins.c to work from GIMPLEs and the tree based
10301 validate_arglist will then be removed. */
10304 validate_gimple_arglist (const gcall
*call
, ...)
10306 enum tree_code code
;
10312 va_start (ap
, call
);
10317 code
= (enum tree_code
) va_arg (ap
, int);
10321 /* This signifies an ellipses, any further arguments are all ok. */
10325 /* This signifies an endlink, if no arguments remain, return
10326 true, otherwise return false. */
10327 res
= (i
== gimple_call_num_args (call
));
10330 /* If no parameters remain or the parameter's code does not
10331 match the specified code, return false. Otherwise continue
10332 checking any remaining arguments. */
10333 arg
= gimple_call_arg (call
, i
++);
10334 if (!validate_arg (arg
, code
))
10341 /* We need gotos here since we can only have one VA_CLOSE in a
10349 /* Default target-specific builtin expander that does nothing. */
10352 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10353 rtx target ATTRIBUTE_UNUSED
,
10354 rtx subtarget ATTRIBUTE_UNUSED
,
10355 machine_mode mode ATTRIBUTE_UNUSED
,
10356 int ignore ATTRIBUTE_UNUSED
)
10361 /* Returns true is EXP represents data that would potentially reside
10362 in a readonly section. */
10365 readonly_data_expr (tree exp
)
10369 if (TREE_CODE (exp
) != ADDR_EXPR
)
10372 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10376 /* Make sure we call decl_readonly_section only for trees it
10377 can handle (since it returns true for everything it doesn't
10379 if (TREE_CODE (exp
) == STRING_CST
10380 || TREE_CODE (exp
) == CONSTRUCTOR
10381 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10382 return decl_readonly_section (exp
, 0);
10387 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10388 to the call, and TYPE is its return type.
10390 Return NULL_TREE if no simplification was possible, otherwise return the
10391 simplified form of the call as a tree.
10393 The simplified form may be a constant or other expression which
10394 computes the same value, but in a more efficient manner (including
10395 calls to other builtin functions).
10397 The call may contain arguments which need to be evaluated, but
10398 which are not useful to determine the result of the call. In
10399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10400 COMPOUND_EXPR will be an argument which must be evaluated.
10401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10402 COMPOUND_EXPR in the chain will contain the tree for the simplified
10403 form of the builtin function call. */
10406 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
10408 if (!validate_arg (s1
, POINTER_TYPE
)
10409 || !validate_arg (s2
, POINTER_TYPE
))
10414 const char *p1
, *p2
;
10416 p2
= c_getstr (s2
);
10420 p1
= c_getstr (s1
);
10423 const char *r
= strstr (p1
, p2
);
10427 return build_int_cst (TREE_TYPE (s1
), 0);
10429 /* Return an offset into the constant string argument. */
10430 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10431 return fold_convert_loc (loc
, type
, tem
);
10434 /* The argument is const char *, and the result is char *, so we need
10435 a type conversion here to avoid a warning. */
10437 return fold_convert_loc (loc
, type
, s1
);
10442 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10446 /* New argument list transforming strstr(s1, s2) to
10447 strchr(s1, s2[0]). */
10448 return build_call_expr_loc (loc
, fn
, 2, s1
,
10449 build_int_cst (integer_type_node
, p2
[0]));
10453 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10454 the call, and TYPE is its return type.
10456 Return NULL_TREE if no simplification was possible, otherwise return the
10457 simplified form of the call as a tree.
10459 The simplified form may be a constant or other expression which
10460 computes the same value, but in a more efficient manner (including
10461 calls to other builtin functions).
10463 The call may contain arguments which need to be evaluated, but
10464 which are not useful to determine the result of the call. In
10465 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10466 COMPOUND_EXPR will be an argument which must be evaluated.
10467 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10468 COMPOUND_EXPR in the chain will contain the tree for the simplified
10469 form of the builtin function call. */
10472 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
10474 if (!validate_arg (s1
, POINTER_TYPE
)
10475 || !validate_arg (s2
, INTEGER_TYPE
))
10481 if (TREE_CODE (s2
) != INTEGER_CST
)
10484 p1
= c_getstr (s1
);
10491 if (target_char_cast (s2
, &c
))
10494 r
= strchr (p1
, c
);
10497 return build_int_cst (TREE_TYPE (s1
), 0);
10499 /* Return an offset into the constant string argument. */
10500 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10501 return fold_convert_loc (loc
, type
, tem
);
10507 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10508 the call, and TYPE is its return type.
10510 Return NULL_TREE if no simplification was possible, otherwise return the
10511 simplified form of the call as a tree.
10513 The simplified form may be a constant or other expression which
10514 computes the same value, but in a more efficient manner (including
10515 calls to other builtin functions).
10517 The call may contain arguments which need to be evaluated, but
10518 which are not useful to determine the result of the call. In
10519 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10520 COMPOUND_EXPR will be an argument which must be evaluated.
10521 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10522 COMPOUND_EXPR in the chain will contain the tree for the simplified
10523 form of the builtin function call. */
10526 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
10528 if (!validate_arg (s1
, POINTER_TYPE
)
10529 || !validate_arg (s2
, INTEGER_TYPE
))
10536 if (TREE_CODE (s2
) != INTEGER_CST
)
10539 p1
= c_getstr (s1
);
10546 if (target_char_cast (s2
, &c
))
10549 r
= strrchr (p1
, c
);
10552 return build_int_cst (TREE_TYPE (s1
), 0);
10554 /* Return an offset into the constant string argument. */
10555 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10556 return fold_convert_loc (loc
, type
, tem
);
10559 if (! integer_zerop (s2
))
10562 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10566 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10567 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
10571 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10572 to the call, and TYPE is its return type.
10574 Return NULL_TREE if no simplification was possible, otherwise return the
10575 simplified form of the call as a tree.
10577 The simplified form may be a constant or other expression which
10578 computes the same value, but in a more efficient manner (including
10579 calls to other builtin functions).
10581 The call may contain arguments which need to be evaluated, but
10582 which are not useful to determine the result of the call. In
10583 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10584 COMPOUND_EXPR will be an argument which must be evaluated.
10585 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10586 COMPOUND_EXPR in the chain will contain the tree for the simplified
10587 form of the builtin function call. */
10590 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
10592 if (!validate_arg (s1
, POINTER_TYPE
)
10593 || !validate_arg (s2
, POINTER_TYPE
))
10598 const char *p1
, *p2
;
10600 p2
= c_getstr (s2
);
10604 p1
= c_getstr (s1
);
10607 const char *r
= strpbrk (p1
, p2
);
10611 return build_int_cst (TREE_TYPE (s1
), 0);
10613 /* Return an offset into the constant string argument. */
10614 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10615 return fold_convert_loc (loc
, type
, tem
);
10619 /* strpbrk(x, "") == NULL.
10620 Evaluate and ignore s1 in case it had side-effects. */
10621 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
10624 return NULL_TREE
; /* Really call strpbrk. */
10626 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10630 /* New argument list transforming strpbrk(s1, s2) to
10631 strchr(s1, s2[0]). */
10632 return build_call_expr_loc (loc
, fn
, 2, s1
,
10633 build_int_cst (integer_type_node
, p2
[0]));
10637 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10640 Return NULL_TREE if no simplification was possible, otherwise return the
10641 simplified form of the call as a tree.
10643 The simplified form may be a constant or other expression which
10644 computes the same value, but in a more efficient manner (including
10645 calls to other builtin functions).
10647 The call may contain arguments which need to be evaluated, but
10648 which are not useful to determine the result of the call. In
10649 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10650 COMPOUND_EXPR will be an argument which must be evaluated.
10651 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10652 COMPOUND_EXPR in the chain will contain the tree for the simplified
10653 form of the builtin function call. */
10656 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
10658 if (!validate_arg (s1
, POINTER_TYPE
)
10659 || !validate_arg (s2
, POINTER_TYPE
))
10663 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10665 /* If both arguments are constants, evaluate at compile-time. */
10668 const size_t r
= strspn (p1
, p2
);
10669 return build_int_cst (size_type_node
, r
);
10672 /* If either argument is "", return NULL_TREE. */
10673 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10674 /* Evaluate and ignore both arguments in case either one has
10676 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
10682 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10685 Return NULL_TREE if no simplification was possible, otherwise return the
10686 simplified form of the call as a tree.
10688 The simplified form may be a constant or other expression which
10689 computes the same value, but in a more efficient manner (including
10690 calls to other builtin functions).
10692 The call may contain arguments which need to be evaluated, but
10693 which are not useful to determine the result of the call. In
10694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10695 COMPOUND_EXPR will be an argument which must be evaluated.
10696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10697 COMPOUND_EXPR in the chain will contain the tree for the simplified
10698 form of the builtin function call. */
10701 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
10703 if (!validate_arg (s1
, POINTER_TYPE
)
10704 || !validate_arg (s2
, POINTER_TYPE
))
10708 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10710 /* If both arguments are constants, evaluate at compile-time. */
10713 const size_t r
= strcspn (p1
, p2
);
10714 return build_int_cst (size_type_node
, r
);
10717 /* If the first argument is "", return NULL_TREE. */
10718 if (p1
&& *p1
== '\0')
10720 /* Evaluate and ignore argument s2 in case it has
10722 return omit_one_operand_loc (loc
, size_type_node
,
10723 size_zero_node
, s2
);
10726 /* If the second argument is "", return __builtin_strlen(s1). */
10727 if (p2
&& *p2
== '\0')
10729 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10731 /* If the replacement _DECL isn't initialized, don't do the
10736 return build_call_expr_loc (loc
, fn
, 1, s1
);
10742 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10743 produced. False otherwise. This is done so that we don't output the error
10744 or warning twice or three times. */
10747 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10749 tree fntype
= TREE_TYPE (current_function_decl
);
10750 int nargs
= call_expr_nargs (exp
);
10752 /* There is good chance the current input_location points inside the
10753 definition of the va_start macro (perhaps on the token for
10754 builtin) in a system header, so warnings will not be emitted.
10755 Use the location in real source code. */
10756 source_location current_location
=
10757 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10760 if (!stdarg_p (fntype
))
10762 error ("%<va_start%> used in function with fixed args");
10768 if (va_start_p
&& (nargs
!= 2))
10770 error ("wrong number of arguments to function %<va_start%>");
10773 arg
= CALL_EXPR_ARG (exp
, 1);
10775 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10776 when we checked the arguments and if needed issued a warning. */
10781 /* Evidently an out of date version of <stdarg.h>; can't validate
10782 va_start's second argument, but can still work as intended. */
10783 warning_at (current_location
,
10785 "%<__builtin_next_arg%> called without an argument");
10788 else if (nargs
> 1)
10790 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10793 arg
= CALL_EXPR_ARG (exp
, 0);
10796 if (TREE_CODE (arg
) == SSA_NAME
)
10797 arg
= SSA_NAME_VAR (arg
);
10799 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10800 or __builtin_next_arg (0) the first time we see it, after checking
10801 the arguments and if needed issuing a warning. */
10802 if (!integer_zerop (arg
))
10804 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
10806 /* Strip off all nops for the sake of the comparison. This
10807 is not quite the same as STRIP_NOPS. It does more.
10808 We must also strip off INDIRECT_EXPR for C++ reference
10810 while (CONVERT_EXPR_P (arg
)
10811 || TREE_CODE (arg
) == INDIRECT_REF
)
10812 arg
= TREE_OPERAND (arg
, 0);
10813 if (arg
!= last_parm
)
10815 /* FIXME: Sometimes with the tree optimizers we can get the
10816 not the last argument even though the user used the last
10817 argument. We just warn and set the arg to be the last
10818 argument so that we will get wrong-code because of
10820 warning_at (current_location
,
10822 "second parameter of %<va_start%> not last named argument");
10825 /* Undefined by C99 7.15.1.4p4 (va_start):
10826 "If the parameter parmN is declared with the register storage
10827 class, with a function or array type, or with a type that is
10828 not compatible with the type that results after application of
10829 the default argument promotions, the behavior is undefined."
10831 else if (DECL_REGISTER (arg
))
10833 warning_at (current_location
,
10835 "undefined behaviour when second parameter of "
10836 "%<va_start%> is declared with %<register%> storage");
10839 /* We want to verify the second parameter just once before the tree
10840 optimizers are run and then avoid keeping it in the tree,
10841 as otherwise we could warn even for correct code like:
10842 void foo (int i, ...)
10843 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10845 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
10847 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
10853 /* Expand a call EXP to __builtin_object_size. */
10856 expand_builtin_object_size (tree exp
)
10859 int object_size_type
;
10860 tree fndecl
= get_callee_fndecl (exp
);
10862 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10864 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10866 expand_builtin_trap ();
10870 ost
= CALL_EXPR_ARG (exp
, 1);
10873 if (TREE_CODE (ost
) != INTEGER_CST
10874 || tree_int_cst_sgn (ost
) < 0
10875 || compare_tree_int (ost
, 3) > 0)
10877 error ("%Klast argument of %D is not integer constant between 0 and 3",
10879 expand_builtin_trap ();
10883 object_size_type
= tree_to_shwi (ost
);
10885 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
10888 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10889 FCODE is the BUILT_IN_* to use.
10890 Return NULL_RTX if we failed; the caller should emit a normal call,
10891 otherwise try to get the result in TARGET, if convenient (and in
10892 mode MODE if that's convenient). */
10895 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
10896 enum built_in_function fcode
)
10898 tree dest
, src
, len
, size
;
10900 if (!validate_arglist (exp
,
10902 fcode
== BUILT_IN_MEMSET_CHK
10903 ? INTEGER_TYPE
: POINTER_TYPE
,
10904 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10907 dest
= CALL_EXPR_ARG (exp
, 0);
10908 src
= CALL_EXPR_ARG (exp
, 1);
10909 len
= CALL_EXPR_ARG (exp
, 2);
10910 size
= CALL_EXPR_ARG (exp
, 3);
10912 if (! tree_fits_uhwi_p (size
))
10915 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
10919 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
10921 warning_at (tree_nonartificial_location (exp
),
10922 0, "%Kcall to %D will always overflow destination buffer",
10923 exp
, get_callee_fndecl (exp
));
10928 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10929 mem{cpy,pcpy,move,set} is available. */
10932 case BUILT_IN_MEMCPY_CHK
:
10933 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
10935 case BUILT_IN_MEMPCPY_CHK
:
10936 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
10938 case BUILT_IN_MEMMOVE_CHK
:
10939 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
10941 case BUILT_IN_MEMSET_CHK
:
10942 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
10951 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
10952 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10953 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10954 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10956 else if (fcode
== BUILT_IN_MEMSET_CHK
)
10960 unsigned int dest_align
= get_pointer_alignment (dest
);
10962 /* If DEST is not a pointer type, call the normal function. */
10963 if (dest_align
== 0)
10966 /* If SRC and DEST are the same (and not volatile), do nothing. */
10967 if (operand_equal_p (src
, dest
, 0))
10971 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
10973 /* Evaluate and ignore LEN in case it has side-effects. */
10974 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10975 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
10978 expr
= fold_build_pointer_plus (dest
, len
);
10979 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
10982 /* __memmove_chk special case. */
10983 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
10985 unsigned int src_align
= get_pointer_alignment (src
);
10987 if (src_align
== 0)
10990 /* If src is categorized for a readonly section we can use
10991 normal __memcpy_chk. */
10992 if (readonly_data_expr (src
))
10994 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
10997 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
10998 dest
, src
, len
, size
);
10999 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
11000 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11001 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11008 /* Emit warning if a buffer overflow is detected at compile time. */
11011 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11015 location_t loc
= tree_nonartificial_location (exp
);
11019 case BUILT_IN_STRCPY_CHK
:
11020 case BUILT_IN_STPCPY_CHK
:
11021 /* For __strcat_chk the warning will be emitted only if overflowing
11022 by at least strlen (dest) + 1 bytes. */
11023 case BUILT_IN_STRCAT_CHK
:
11024 len
= CALL_EXPR_ARG (exp
, 1);
11025 size
= CALL_EXPR_ARG (exp
, 2);
11028 case BUILT_IN_STRNCAT_CHK
:
11029 case BUILT_IN_STRNCPY_CHK
:
11030 case BUILT_IN_STPNCPY_CHK
:
11031 len
= CALL_EXPR_ARG (exp
, 2);
11032 size
= CALL_EXPR_ARG (exp
, 3);
11034 case BUILT_IN_SNPRINTF_CHK
:
11035 case BUILT_IN_VSNPRINTF_CHK
:
11036 len
= CALL_EXPR_ARG (exp
, 1);
11037 size
= CALL_EXPR_ARG (exp
, 3);
11040 gcc_unreachable ();
11046 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11051 len
= c_strlen (len
, 1);
11052 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11055 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11057 tree src
= CALL_EXPR_ARG (exp
, 1);
11058 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11060 src
= c_strlen (src
, 1);
11061 if (! src
|| ! tree_fits_uhwi_p (src
))
11063 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
11064 exp
, get_callee_fndecl (exp
));
11067 else if (tree_int_cst_lt (src
, size
))
11070 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
11073 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
11074 exp
, get_callee_fndecl (exp
));
11077 /* Emit warning if a buffer overflow is detected at compile time
11078 in __sprintf_chk/__vsprintf_chk calls. */
11081 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11083 tree size
, len
, fmt
;
11084 const char *fmt_str
;
11085 int nargs
= call_expr_nargs (exp
);
11087 /* Verify the required arguments in the original call. */
11091 size
= CALL_EXPR_ARG (exp
, 2);
11092 fmt
= CALL_EXPR_ARG (exp
, 3);
11094 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11097 /* Check whether the format is a literal string constant. */
11098 fmt_str
= c_getstr (fmt
);
11099 if (fmt_str
== NULL
)
11102 if (!init_target_chars ())
11105 /* If the format doesn't contain % args or %%, we know its size. */
11106 if (strchr (fmt_str
, target_percent
) == 0)
11107 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11108 /* If the format is "%s" and first ... argument is a string literal,
11110 else if (fcode
== BUILT_IN_SPRINTF_CHK
11111 && strcmp (fmt_str
, target_percent_s
) == 0)
11117 arg
= CALL_EXPR_ARG (exp
, 4);
11118 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11121 len
= c_strlen (arg
, 1);
11122 if (!len
|| ! tree_fits_uhwi_p (len
))
11128 if (! tree_int_cst_lt (len
, size
))
11129 warning_at (tree_nonartificial_location (exp
),
11130 0, "%Kcall to %D will always overflow destination buffer",
11131 exp
, get_callee_fndecl (exp
));
11134 /* Emit warning if a free is called with address of a variable. */
11137 maybe_emit_free_warning (tree exp
)
11139 tree arg
= CALL_EXPR_ARG (exp
, 0);
11142 if (TREE_CODE (arg
) != ADDR_EXPR
)
11145 arg
= get_base_address (TREE_OPERAND (arg
, 0));
11146 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
11149 if (SSA_VAR_P (arg
))
11150 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11151 "%Kattempt to free a non-heap object %qD", exp
, arg
);
11153 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11154 "%Kattempt to free a non-heap object", exp
);
11157 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11161 fold_builtin_object_size (tree ptr
, tree ost
)
11163 unsigned HOST_WIDE_INT bytes
;
11164 int object_size_type
;
11166 if (!validate_arg (ptr
, POINTER_TYPE
)
11167 || !validate_arg (ost
, INTEGER_TYPE
))
11172 if (TREE_CODE (ost
) != INTEGER_CST
11173 || tree_int_cst_sgn (ost
) < 0
11174 || compare_tree_int (ost
, 3) > 0)
11177 object_size_type
= tree_to_shwi (ost
);
11179 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11180 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11181 and (size_t) 0 for types 2 and 3. */
11182 if (TREE_SIDE_EFFECTS (ptr
))
11183 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11185 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11187 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11188 if (wi::fits_to_tree_p (bytes
, size_type_node
))
11189 return build_int_cstu (size_type_node
, bytes
);
11191 else if (TREE_CODE (ptr
) == SSA_NAME
)
11193 /* If object size is not known yet, delay folding until
11194 later. Maybe subsequent passes will help determining
11196 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11197 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
11198 && wi::fits_to_tree_p (bytes
, size_type_node
))
11199 return build_int_cstu (size_type_node
, bytes
);
11205 /* Builtins with folding operations that operate on "..." arguments
11206 need special handling; we need to store the arguments in a convenient
11207 data structure before attempting any folding. Fortunately there are
11208 only a few builtins that fall into this category. FNDECL is the
11209 function, EXP is the CALL_EXPR for the call. */
11212 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
11214 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11215 tree ret
= NULL_TREE
;
11219 case BUILT_IN_FPCLASSIFY
:
11220 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
11228 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11229 SET_EXPR_LOCATION (ret
, loc
);
11230 TREE_NO_WARNING (ret
) = 1;
11236 /* Initialize format string characters in the target charset. */
11239 init_target_chars (void)
11244 target_newline
= lang_hooks
.to_target_charset ('\n');
11245 target_percent
= lang_hooks
.to_target_charset ('%');
11246 target_c
= lang_hooks
.to_target_charset ('c');
11247 target_s
= lang_hooks
.to_target_charset ('s');
11248 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
11252 target_percent_c
[0] = target_percent
;
11253 target_percent_c
[1] = target_c
;
11254 target_percent_c
[2] = '\0';
11256 target_percent_s
[0] = target_percent
;
11257 target_percent_s
[1] = target_s
;
11258 target_percent_s
[2] = '\0';
11260 target_percent_s_newline
[0] = target_percent
;
11261 target_percent_s_newline
[1] = target_s
;
11262 target_percent_s_newline
[2] = target_newline
;
11263 target_percent_s_newline
[3] = '\0';
11270 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11271 and no overflow/underflow occurred. INEXACT is true if M was not
11272 exactly calculated. TYPE is the tree type for the result. This
11273 function assumes that you cleared the MPFR flags and then
11274 calculated M to see if anything subsequently set a flag prior to
11275 entering this function. Return NULL_TREE if any checks fail. */
11278 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
11280 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11281 overflow/underflow occurred. If -frounding-math, proceed iff the
11282 result of calling FUNC was exact. */
11283 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11284 && (!flag_rounding_math
|| !inexact
))
11286 REAL_VALUE_TYPE rr
;
11288 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
11289 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11290 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11291 but the mpft_t is not, then we underflowed in the
11293 if (real_isfinite (&rr
)
11294 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
11296 REAL_VALUE_TYPE rmode
;
11298 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
11299 /* Proceed iff the specified mode can hold the value. */
11300 if (real_identical (&rmode
, &rr
))
11301 return build_real (type
, rmode
);
11307 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11308 number and no overflow/underflow occurred. INEXACT is true if M
11309 was not exactly calculated. TYPE is the tree type for the result.
11310 This function assumes that you cleared the MPFR flags and then
11311 calculated M to see if anything subsequently set a flag prior to
11312 entering this function. Return NULL_TREE if any checks fail, if
11313 FORCE_CONVERT is true, then bypass the checks. */
11316 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11318 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11319 overflow/underflow occurred. If -frounding-math, proceed iff the
11320 result of calling FUNC was exact. */
11322 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11323 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11324 && (!flag_rounding_math
|| !inexact
)))
11326 REAL_VALUE_TYPE re
, im
;
11328 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
11329 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
11330 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11331 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11332 but the mpft_t is not, then we underflowed in the
11335 || (real_isfinite (&re
) && real_isfinite (&im
)
11336 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11337 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11339 REAL_VALUE_TYPE re_mode
, im_mode
;
11341 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11342 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11343 /* Proceed iff the specified mode can hold the value. */
11345 || (real_identical (&re_mode
, &re
)
11346 && real_identical (&im_mode
, &im
)))
11347 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11348 build_real (TREE_TYPE (type
), im_mode
));
11354 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11355 FUNC on it and return the resulting value as a tree with type TYPE.
11356 If MIN and/or MAX are not NULL, then the supplied ARG must be
11357 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11358 acceptable values, otherwise they are not. The mpfr precision is
11359 set to the precision of TYPE. We assume that function FUNC returns
11360 zero if the result could be calculated exactly within the requested
11364 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
11365 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
11368 tree result
= NULL_TREE
;
11372 /* To proceed, MPFR must exactly represent the target floating point
11373 format, which only happens when the target base equals two. */
11374 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11375 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
11377 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11379 if (real_isfinite (ra
)
11380 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
11381 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
11383 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11384 const int prec
= fmt
->p
;
11385 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11389 mpfr_init2 (m
, prec
);
11390 mpfr_from_real (m
, ra
, GMP_RNDN
);
11391 mpfr_clear_flags ();
11392 inexact
= func (m
, m
, rnd
);
11393 result
= do_mpfr_ckconv (m
, type
, inexact
);
11401 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11402 FUNC on it and return the resulting value as a tree with type TYPE.
11403 The mpfr precision is set to the precision of TYPE. We assume that
11404 function FUNC returns zero if the result could be calculated
11405 exactly within the requested precision. */
11408 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
11409 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11411 tree result
= NULL_TREE
;
11416 /* To proceed, MPFR must exactly represent the target floating point
11417 format, which only happens when the target base equals two. */
11418 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11419 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11420 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11422 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11423 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11425 if (real_isfinite (ra1
) && real_isfinite (ra2
))
11427 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11428 const int prec
= fmt
->p
;
11429 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11433 mpfr_inits2 (prec
, m1
, m2
, NULL
);
11434 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11435 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11436 mpfr_clear_flags ();
11437 inexact
= func (m1
, m1
, m2
, rnd
);
11438 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11439 mpfr_clears (m1
, m2
, NULL
);
11446 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11447 FUNC on it and return the resulting value as a tree with type TYPE.
11448 The mpfr precision is set to the precision of TYPE. We assume that
11449 function FUNC returns zero if the result could be calculated
11450 exactly within the requested precision. */
11453 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
11454 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11456 tree result
= NULL_TREE
;
11462 /* To proceed, MPFR must exactly represent the target floating point
11463 format, which only happens when the target base equals two. */
11464 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11465 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11466 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
11467 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
11469 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11470 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11471 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
11473 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
11475 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11476 const int prec
= fmt
->p
;
11477 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11481 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
11482 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11483 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11484 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
11485 mpfr_clear_flags ();
11486 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
11487 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11488 mpfr_clears (m1
, m2
, m3
, NULL
);
11495 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11496 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11497 If ARG_SINP and ARG_COSP are NULL then the result is returned
11498 as a complex value.
11499 The type is taken from the type of ARG and is used for setting the
11500 precision of the calculation and results. */
11503 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
11505 tree
const type
= TREE_TYPE (arg
);
11506 tree result
= NULL_TREE
;
11510 /* To proceed, MPFR must exactly represent the target floating point
11511 format, which only happens when the target base equals two. */
11512 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11513 && TREE_CODE (arg
) == REAL_CST
11514 && !TREE_OVERFLOW (arg
))
11516 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11518 if (real_isfinite (ra
))
11520 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11521 const int prec
= fmt
->p
;
11522 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11523 tree result_s
, result_c
;
11527 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
11528 mpfr_from_real (m
, ra
, GMP_RNDN
);
11529 mpfr_clear_flags ();
11530 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
11531 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
11532 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
11533 mpfr_clears (m
, ms
, mc
, NULL
);
11534 if (result_s
&& result_c
)
11536 /* If we are to return in a complex value do so. */
11537 if (!arg_sinp
&& !arg_cosp
)
11538 return build_complex (build_complex_type (type
),
11539 result_c
, result_s
);
11541 /* Dereference the sin/cos pointer arguments. */
11542 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
11543 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
11544 /* Proceed if valid pointer type were passed in. */
11545 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
11546 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
11548 /* Set the values. */
11549 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
11551 TREE_SIDE_EFFECTS (result_s
) = 1;
11552 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
11554 TREE_SIDE_EFFECTS (result_c
) = 1;
11555 /* Combine the assignments into a compound expr. */
11556 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11557 result_s
, result_c
));
11565 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11566 two-argument mpfr order N Bessel function FUNC on them and return
11567 the resulting value as a tree with type TYPE. The mpfr precision
11568 is set to the precision of TYPE. We assume that function FUNC
11569 returns zero if the result could be calculated exactly within the
11570 requested precision. */
11572 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
11573 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
11574 const REAL_VALUE_TYPE
*min
, bool inclusive
)
11576 tree result
= NULL_TREE
;
11581 /* To proceed, MPFR must exactly represent the target floating point
11582 format, which only happens when the target base equals two. */
11583 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11584 && tree_fits_shwi_p (arg1
)
11585 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11587 const HOST_WIDE_INT n
= tree_to_shwi (arg1
);
11588 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
11591 && real_isfinite (ra
)
11592 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
11594 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11595 const int prec
= fmt
->p
;
11596 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11600 mpfr_init2 (m
, prec
);
11601 mpfr_from_real (m
, ra
, GMP_RNDN
);
11602 mpfr_clear_flags ();
11603 inexact
= func (m
, n
, m
, rnd
);
11604 result
= do_mpfr_ckconv (m
, type
, inexact
);
11612 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11613 the pointer *(ARG_QUO) and return the result. The type is taken
11614 from the type of ARG0 and is used for setting the precision of the
11615 calculation and results. */
11618 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
11620 tree
const type
= TREE_TYPE (arg0
);
11621 tree result
= NULL_TREE
;
11626 /* To proceed, MPFR must exactly represent the target floating point
11627 format, which only happens when the target base equals two. */
11628 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11629 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
11630 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
11632 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
11633 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
11635 if (real_isfinite (ra0
) && real_isfinite (ra1
))
11637 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11638 const int prec
= fmt
->p
;
11639 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11644 mpfr_inits2 (prec
, m0
, m1
, NULL
);
11645 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
11646 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11647 mpfr_clear_flags ();
11648 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
11649 /* Remquo is independent of the rounding mode, so pass
11650 inexact=0 to do_mpfr_ckconv(). */
11651 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
11652 mpfr_clears (m0
, m1
, NULL
);
11655 /* MPFR calculates quo in the host's long so it may
11656 return more bits in quo than the target int can hold
11657 if sizeof(host long) > sizeof(target int). This can
11658 happen even for native compilers in LP64 mode. In
11659 these cases, modulo the quo value with the largest
11660 number that the target int can hold while leaving one
11661 bit for the sign. */
11662 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
11663 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
11665 /* Dereference the quo pointer argument. */
11666 arg_quo
= build_fold_indirect_ref (arg_quo
);
11667 /* Proceed iff a valid pointer type was passed in. */
11668 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
11670 /* Set the value. */
11672 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
11673 build_int_cst (TREE_TYPE (arg_quo
),
11675 TREE_SIDE_EFFECTS (result_quo
) = 1;
11676 /* Combine the quo assignment with the rem. */
11677 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11678 result_quo
, result_rem
));
11686 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11687 resulting value as a tree with type TYPE. The mpfr precision is
11688 set to the precision of TYPE. We assume that this mpfr function
11689 returns zero if the result could be calculated exactly within the
11690 requested precision. In addition, the integer pointer represented
11691 by ARG_SG will be dereferenced and set to the appropriate signgam
11695 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
11697 tree result
= NULL_TREE
;
11701 /* To proceed, MPFR must exactly represent the target floating point
11702 format, which only happens when the target base equals two. Also
11703 verify ARG is a constant and that ARG_SG is an int pointer. */
11704 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11705 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
11706 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
11707 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
11709 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
11711 /* In addition to NaN and Inf, the argument cannot be zero or a
11712 negative integer. */
11713 if (real_isfinite (ra
)
11714 && ra
->cl
!= rvc_zero
11715 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
11717 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11718 const int prec
= fmt
->p
;
11719 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11724 mpfr_init2 (m
, prec
);
11725 mpfr_from_real (m
, ra
, GMP_RNDN
);
11726 mpfr_clear_flags ();
11727 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
11728 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
11734 /* Dereference the arg_sg pointer argument. */
11735 arg_sg
= build_fold_indirect_ref (arg_sg
);
11736 /* Assign the signgam value into *arg_sg. */
11737 result_sg
= fold_build2 (MODIFY_EXPR
,
11738 TREE_TYPE (arg_sg
), arg_sg
,
11739 build_int_cst (TREE_TYPE (arg_sg
), sg
));
11740 TREE_SIDE_EFFECTS (result_sg
) = 1;
11741 /* Combine the signgam assignment with the lgamma result. */
11742 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11743 result_sg
, result_lg
));
11751 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11752 function FUNC on it and return the resulting value as a tree with
11753 type TYPE. The mpfr precision is set to the precision of TYPE. We
11754 assume that function FUNC returns zero if the result could be
11755 calculated exactly within the requested precision. */
11758 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
11760 tree result
= NULL_TREE
;
11764 /* To proceed, MPFR must exactly represent the target floating point
11765 format, which only happens when the target base equals two. */
11766 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
11767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
11768 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
11770 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
11771 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
11773 if (real_isfinite (re
) && real_isfinite (im
))
11775 const struct real_format
*const fmt
=
11776 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11777 const int prec
= fmt
->p
;
11778 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11779 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11783 mpc_init2 (m
, prec
);
11784 mpfr_from_real (mpc_realref (m
), re
, rnd
);
11785 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
11786 mpfr_clear_flags ();
11787 inexact
= func (m
, m
, crnd
);
11788 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
11796 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11797 mpc function FUNC on it and return the resulting value as a tree
11798 with type TYPE. The mpfr precision is set to the precision of
11799 TYPE. We assume that function FUNC returns zero if the result
11800 could be calculated exactly within the requested precision. If
11801 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11802 in the arguments and/or results. */
11805 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
11806 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
11808 tree result
= NULL_TREE
;
11813 /* To proceed, MPFR must exactly represent the target floating point
11814 format, which only happens when the target base equals two. */
11815 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
11816 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
11817 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
11818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
11819 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
11821 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
11822 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
11823 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
11824 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
11827 || (real_isfinite (re0
) && real_isfinite (im0
)
11828 && real_isfinite (re1
) && real_isfinite (im1
)))
11830 const struct real_format
*const fmt
=
11831 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11832 const int prec
= fmt
->p
;
11833 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11834 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11838 mpc_init2 (m0
, prec
);
11839 mpc_init2 (m1
, prec
);
11840 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
11841 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
11842 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
11843 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
11844 mpfr_clear_flags ();
11845 inexact
= func (m0
, m0
, m1
, crnd
);
11846 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
11855 /* A wrapper function for builtin folding that prevents warnings for
11856 "statement without effect" and the like, caused by removing the
11857 call node earlier than the warning is generated. */
11860 fold_call_stmt (gcall
*stmt
, bool ignore
)
11862 tree ret
= NULL_TREE
;
11863 tree fndecl
= gimple_call_fndecl (stmt
);
11864 location_t loc
= gimple_location (stmt
);
11866 && TREE_CODE (fndecl
) == FUNCTION_DECL
11867 && DECL_BUILT_IN (fndecl
)
11868 && !gimple_call_va_arg_pack_p (stmt
))
11870 int nargs
= gimple_call_num_args (stmt
);
11871 tree
*args
= (nargs
> 0
11872 ? gimple_call_arg_ptr (stmt
, 0)
11873 : &error_mark_node
);
11875 if (avoid_folding_inline_builtin (fndecl
))
11877 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11879 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
11883 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11886 /* Propagate location information from original call to
11887 expansion of builtin. Otherwise things like
11888 maybe_emit_chk_warning, that operate on the expansion
11889 of a builtin, will use the wrong location information. */
11890 if (gimple_has_location (stmt
))
11892 tree realret
= ret
;
11893 if (TREE_CODE (ret
) == NOP_EXPR
)
11894 realret
= TREE_OPERAND (ret
, 0);
11895 if (CAN_HAVE_LOCATION_P (realret
)
11896 && !EXPR_HAS_LOCATION (realret
))
11897 SET_EXPR_LOCATION (realret
, loc
);
11907 /* Look up the function in builtin_decl that corresponds to DECL
11908 and set ASMSPEC as its user assembler name. DECL must be a
11909 function decl that declares a builtin. */
11912 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
11915 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
11916 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
11919 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
11920 set_user_assembler_name (builtin
, asmspec
);
11921 switch (DECL_FUNCTION_CODE (decl
))
11923 case BUILT_IN_MEMCPY
:
11924 init_block_move_fn (asmspec
);
11925 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
11927 case BUILT_IN_MEMSET
:
11928 init_block_clear_fn (asmspec
);
11929 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
11931 case BUILT_IN_MEMMOVE
:
11932 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
11934 case BUILT_IN_MEMCMP
:
11935 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
11937 case BUILT_IN_ABORT
:
11938 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
11941 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
11943 set_user_assembler_libfunc ("ffs", asmspec
);
11944 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
11945 MODE_INT
, 0), "ffs");
11953 /* Return true if DECL is a builtin that expands to a constant or similarly
11956 is_simple_builtin (tree decl
)
11958 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11959 switch (DECL_FUNCTION_CODE (decl
))
11961 /* Builtins that expand to constants. */
11962 case BUILT_IN_CONSTANT_P
:
11963 case BUILT_IN_EXPECT
:
11964 case BUILT_IN_OBJECT_SIZE
:
11965 case BUILT_IN_UNREACHABLE
:
11966 /* Simple register moves or loads from stack. */
11967 case BUILT_IN_ASSUME_ALIGNED
:
11968 case BUILT_IN_RETURN_ADDRESS
:
11969 case BUILT_IN_EXTRACT_RETURN_ADDR
:
11970 case BUILT_IN_FROB_RETURN_ADDR
:
11971 case BUILT_IN_RETURN
:
11972 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
11973 case BUILT_IN_FRAME_ADDRESS
:
11974 case BUILT_IN_VA_END
:
11975 case BUILT_IN_STACK_SAVE
:
11976 case BUILT_IN_STACK_RESTORE
:
11977 /* Exception state returns or moves registers around. */
11978 case BUILT_IN_EH_FILTER
:
11979 case BUILT_IN_EH_POINTER
:
11980 case BUILT_IN_EH_COPY_VALUES
:
11990 /* Return true if DECL is a builtin that is not expensive, i.e., they are
11991 most probably expanded inline into reasonably simple code. This is a
11992 superset of is_simple_builtin. */
11994 is_inexpensive_builtin (tree decl
)
11998 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
12000 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
12001 switch (DECL_FUNCTION_CODE (decl
))
12004 case BUILT_IN_ALLOCA
:
12005 case BUILT_IN_ALLOCA_WITH_ALIGN
:
12006 case BUILT_IN_BSWAP16
:
12007 case BUILT_IN_BSWAP32
:
12008 case BUILT_IN_BSWAP64
:
12010 case BUILT_IN_CLZIMAX
:
12011 case BUILT_IN_CLZL
:
12012 case BUILT_IN_CLZLL
:
12014 case BUILT_IN_CTZIMAX
:
12015 case BUILT_IN_CTZL
:
12016 case BUILT_IN_CTZLL
:
12018 case BUILT_IN_FFSIMAX
:
12019 case BUILT_IN_FFSL
:
12020 case BUILT_IN_FFSLL
:
12021 case BUILT_IN_IMAXABS
:
12022 case BUILT_IN_FINITE
:
12023 case BUILT_IN_FINITEF
:
12024 case BUILT_IN_FINITEL
:
12025 case BUILT_IN_FINITED32
:
12026 case BUILT_IN_FINITED64
:
12027 case BUILT_IN_FINITED128
:
12028 case BUILT_IN_FPCLASSIFY
:
12029 case BUILT_IN_ISFINITE
:
12030 case BUILT_IN_ISINF_SIGN
:
12031 case BUILT_IN_ISINF
:
12032 case BUILT_IN_ISINFF
:
12033 case BUILT_IN_ISINFL
:
12034 case BUILT_IN_ISINFD32
:
12035 case BUILT_IN_ISINFD64
:
12036 case BUILT_IN_ISINFD128
:
12037 case BUILT_IN_ISNAN
:
12038 case BUILT_IN_ISNANF
:
12039 case BUILT_IN_ISNANL
:
12040 case BUILT_IN_ISNAND32
:
12041 case BUILT_IN_ISNAND64
:
12042 case BUILT_IN_ISNAND128
:
12043 case BUILT_IN_ISNORMAL
:
12044 case BUILT_IN_ISGREATER
:
12045 case BUILT_IN_ISGREATEREQUAL
:
12046 case BUILT_IN_ISLESS
:
12047 case BUILT_IN_ISLESSEQUAL
:
12048 case BUILT_IN_ISLESSGREATER
:
12049 case BUILT_IN_ISUNORDERED
:
12050 case BUILT_IN_VA_ARG_PACK
:
12051 case BUILT_IN_VA_ARG_PACK_LEN
:
12052 case BUILT_IN_VA_COPY
:
12053 case BUILT_IN_TRAP
:
12054 case BUILT_IN_SAVEREGS
:
12055 case BUILT_IN_POPCOUNTL
:
12056 case BUILT_IN_POPCOUNTLL
:
12057 case BUILT_IN_POPCOUNTIMAX
:
12058 case BUILT_IN_POPCOUNT
:
12059 case BUILT_IN_PARITYL
:
12060 case BUILT_IN_PARITYLL
:
12061 case BUILT_IN_PARITYIMAX
:
12062 case BUILT_IN_PARITY
:
12063 case BUILT_IN_LABS
:
12064 case BUILT_IN_LLABS
:
12065 case BUILT_IN_PREFETCH
:
12066 case BUILT_IN_ACC_ON_DEVICE
:
12070 return is_simple_builtin (decl
);