/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "internal-fn.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "tree-chkp.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);
static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
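
/* Worked example (illustrative sketch only, not taken from any caller):
   for a pointer known to satisfy  ptr == 16*k + 4  for some k,
   get_pointer_alignment_1 yields M = 128 bits in *ALIGNP and N = 32 bits
   in *BITPOSP, with N < M and M dividing (ptr*8 - N).  A caller that only
   wants a single alignment value then collapses the pair to the largest
   power of two dividing the address, exactly as done above:

     unsigned int align;
     unsigned HOST_WIDE_INT bitpos;
     get_pointer_alignment_1 (exp, &align, &bitpos);  // align = 128, bitpos = 32
     if (bitpos != 0)
       align = (bitpos & -bitpos);                    // align = 32
*/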
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
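
/* For example (illustrative values only), for the constant "foo\0bar",
   whose character array has TREE_STRING_LENGTH 8: with offset 0 the
   result is 3, with a known offset of 4 it is 3 as well (the length of
   "bar"), and with a non-constant offset the result is NULL_TREE because
   of the embedded zero byte at index 3.  With a constant offset of 9 the
   out-of-bounds warning above triggers and NULL_TREE is returned.  */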
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
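
/* For example (illustrative, assuming an ASCII target with 8-bit bytes):
   reading "abcd" in a 32-bit integer mode produces the constant
   0x64636261 on a little-endian target ('a' = 0x61 lands in the lowest
   byte) and 0x61626364 on a big-endian target, i.e. exactly the value a
   target load of those four bytes would yield.  */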
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
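
/* For example, a user-level call __builtin_return_address (0) reaches
   this function with COUNT 0 and FNDECL_CODE BUILT_IN_RETURN_ADDRESS,
   while __builtin_frame_address (1) arrives with COUNT 1 and
   BUILT_IN_FRAME_ADDRESS, and therefore forces use of the hard frame
   pointer as described above.  */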
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
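
/* Summary of the resulting buffer layout, in Pmode-sized words
   (restating the stores above for reference):
     word 0: the frame value (targetm.builtin_setjmp_frame_value)
     word 1: the address of RECEIVER_LABEL
     word 2: the saved stack pointer, in STACK_SAVEAREA_MODE
   The remaining words of the five-word buffer are left for
   machine-dependent use.  */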
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
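
/* Usage sketch (illustrative fragment, for internal exception handling
   use only, as cautioned above):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...   // direct path
     else
       ...   // resumed by __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the constant 1,
   matching the gcc_assert in the expander above.  */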
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
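
/* For example, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly two pointer arguments, as used by
   expand_builtin_nonlocal_goto below, while validate_arglist (exp,
   POINTER_TYPE, 0) accepts a pointer followed by anything, as used by
   expand_builtin_prefetch.  */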
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
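
/* For example, __builtin_prefetch (p, 0, 3) requests a read prefetch
   with maximum temporal locality and, given the defaults above, is
   equivalent to plain __builtin_prefetch (p); __builtin_prefetch (p, 1, 0)
   requests a write prefetch with no temporal locality.  */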
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                         incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
        dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
        dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
                                                result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* have_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_insn (targetm.gen_call_value (valreg,
                                         gen_rtx_MEM (FUNCTION_MODE, function),
                                         const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_use (reg);
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
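
/* Sketch of how the three builtins above cooperate (illustrative
   fragment only):

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply (fn, args, 64);
     __builtin_return (ret);

   This forwards the current function's arguments to FN and then returns
   whatever FN returned, using the register blocks laid out by
   apply_args_size and apply_result_size; the size argument (here 64)
   must bound the number of stack-argument bytes to copy.  */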
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
1801 /* Expand a call EXP to __builtin_classify_type. */
1804 expand_builtin_classify_type (tree exp
)
1806 if (call_expr_nargs (exp
))
1807 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1808 return GEN_INT (no_type_class
);
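
/* As an illustration (hypothetical call, added for exposition): in

     __builtin_classify_type (3.5)

   the argument has REAL_TYPE, so the call expands to the constant
   real_type_class; a call with no arguments expands to no_type_class.  */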
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
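
/* As an illustration, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one line in the switch below covers the double, float and long
   double variants of each math builtin.  */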
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
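
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns
   the builtin declaration of sinf, and a long double type yields sinl;
   if the target provides no implicit declaration for the chosen
   variant, NULL_TREE is returned instead.  */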
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
                           NULL_RTX, NULL, lab,
                           /* The jump is very likely.  */
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
                      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          if (errno_set)
            expand_errno_check (exp, result);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
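
/* As an illustration of the expansion above: with -fmath-errno in
   effect, a call such as sqrt (x) where X may be negative expands to
   the sqrt instruction followed by the expand_errno_check sequence,
   which compares the result with itself (equal only for non-NaN) and
   arranges for errno to be set to EDOM on the NaN path.  */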
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return NULL_RTX;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
                         result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fall back
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
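
/* For example, on a target with a sincos insn pattern, the call

     sincos (x, &s, &c);

   expands to one instruction computing both results, followed by two
   stores through the given pointers; without the pattern the expander
   returns NULL_RTX and a normal library call is emitted instead.  */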
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
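
/* To summarize the three strategies above with an example: for
   __builtin_cexpi (x) of type double the expansion tries, in order,

     1. a sincos optab insn computing both sin (x) and cos (x),
     2. a libcall sincos (x, &op1, &op2) if libc provides sincos,
     3. a libcall cexp (0.0 + x*I), built via a COMPLEX_EXPR argument,

   where cases 1 and 2 package the two parts into a COMPLEX_EXPR as the
   final return value.  */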
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
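
/* For example,

     build_call_nofold_loc (loc, fn, 2, dst, src)

   yields the unfolded CALL_EXPR fn (dst, src) at location LOC; the
   fallback paths below build their library calls this way precisely so
   the calls are not folded back into the builtins being expanded.  */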
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  rtx tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets without full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
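
/* As an illustration: __builtin_lfloor (x) first tries the lfloor
   optab; failing that, the call is rewritten (via
   build_call_nofold_loc) as floor (x), and the floating point result
   is then truncated to the integer MODE with expand_fix.  */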
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         targets without full C99 support.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
                                                fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
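
/* For example, __builtin_powi (x, n) with X in DFmode becomes a call
   to the libgcc routine returned by optab_libfunc (powi_optab, DFmode)
   (conventionally __powidf2), with N first converted to the mode of
   an int.  */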
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block for a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, in
   which case we set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* An anti range 0...N lets us determine the minimal size
             as N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* Code like

               int n;
               if (n < 100)
                 memcpy (a, b, n)

             produces an anti range allowing negative values of N.  We
             still can use the information and make a guess that N is
             not negative.  */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                          GET_MODE_MASK (GET_MODE (len_rtx)));
}
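
/* For example, if LEN is an SSA name with value range [16, 64], the
   call sets *MIN_SIZE to 16 and both *MAX_SIZE and *PROBABLE_MAX_SIZE
   to 64; an anti range ~[0, 15] instead raises *MIN_SIZE to 16 and
   leaves the maxima at the bounds derived from the type or mode.  */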
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
                            &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
                        &probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                              CONST_CAST (char *, src_str),
                              dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                  builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                     CALL_EXPR_TAILCALL (exp)
                                     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                     expected_align, expected_size,
                                     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
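
/* As an illustration of the store_by_pieces path above: for a call
   such as memcpy (buf, "abc", 4) the source is a string constant and
   the length covers the string including its NUL, so no load from the
   string's memory is emitted; the constant words are produced by
   builtin_memcpy_read_str and stored directly.  */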
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}

/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          target, mode, /*endp=*/ 1,
                                          exp);
    }
}

/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
                                             mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
                             rtx target, machine_mode mode, int endp,
                             tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
           && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                           dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
      if (src_str
          && CONST_INT_P (len_rtx)
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  CONST_CAST (char *, src_str),
                                  dest_align, false))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      CONST_CAST (char *, src_str),
                                      dest_align, false, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (CONST_INT_P (len_rtx)
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
         terminator.  If the caller requested a mempcpy-like return value,
         adjust it.  */
      if (endp == 1 && target != const0_rtx)
        {
          rtx tem = plus_constant (GET_MODE (target),
                                   gen_lowpart (GET_MODE (target), target), 1);
          emit_move_insn (target, force_operand (tem, NULL_RTX));
        }
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, mode, /*endp=*/2,
                                         exp);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);

                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
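
/* For example, strncpy (buf, "ab", 8) must pad the destination with
   six trailing NULs; since strlen (src) + 1 = 3 is less than 8, the
   code above emits all eight bytes with store_by_pieces, reading
   constant bytes (and zeros past the end of the string) through
   builtin_strncpy_read_str.  */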
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}

/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
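/* Illustrative source-level view of the transformation above (hypothetical
   user code, not part of GCC):

     bzero (p, n);    expands exactly as  memset (p, 0, (size_t) n);

   would, while passing EXP along as ORIG_EXP ensures that a failed inline
   expansion still falls back to calling bzero itself.  */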
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (arg3_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
      set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
    }

  rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
					 TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  result = target;
  if (! (result != 0
	 && REG_P (result) && GET_MODE (result) == mode
	 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
    result = gen_reg_rtx (mode);

  emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			   TYPE_MODE (integer_type_node), 3,
			   XEXP (arg1_rtx, 0), Pmode,
			   XEXP (arg2_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
					    TYPE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
  return result;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
				MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
	{
	  tree len;
	  rtx arg3_rtx;

	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (len && !TREE_SIDE_EFFECTS (len))
	    {
	      arg3_rtx = expand_normal (len);
	      result = expand_cmpstrn_or_cmpmem
		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
		 arg3_rtx, MIN (arg1_align, arg2_align));
	    }
	}

      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant
	 lengths, use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
					 arg2_rtx, TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));
      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
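/* An illustrative case (target-specific, given only as an example): where
   va_list is an array type such as 'struct __va_list_tag [1]', a parameter
   declared 'va_list ap' can arrive here with the decayed type
   'struct __va_list_tag *'; unwrapping both the array and the pointer lets
   the TYPE_MAIN_VARIANT comparison above match.  */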
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  bool valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
/* Expand a call to __builtin_trap.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
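/* A worked example (illustrative, not from the original sources):
   __sync_fetch_and_add_4 is the FOO_1 code plus 2, so FCODE_DIFF == 2 and
   the mode requested is mode_for_size (8 << 2, MODE_INT, 0), i.e. the
   32-bit integer mode (SImode on most targets).  */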
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
	       "failure memory model cannot be stronger than success memory "
	       "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid failure memory model for "
	       "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
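/* A worked example (illustrative, not from the original sources): if
   __atomic_add_fetch must fall back to the external __atomic_fetch_add
   routine, the library returns the pre-operation value and the code above
   adds VAL once more to produce the post-operation result; for the NAND
   forms the correction computed above is ~(ret & val).  */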
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = val & -val;
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;

  return boolean_false_node;
}
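/* A worked example (illustrative, not from the original sources): the
   front end may pass a "fake" pointer such as (void *) 8 for ARG1 to
   assert 8-byte alignment; VAL & -VAL isolates the lowest set bit, so
   (void *) 24 encodes the same 8-byte guarantee.  */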
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
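/* Illustration (not part of this file): unlike the "always" variant above,
   __atomic_is_lock_free may fall back to a runtime libatomic query when the
   answer is not known at compile time, so it is not a constant expression:

     _Bool probe (void *p)
     {
       return __atomic_is_lock_free (16, p);   // may become a library call
     }
*/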
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
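/* Illustration (not part of this file): the three fence expanders above
   correspond to the following user-level calls:

     __atomic_thread_fence (__ATOMIC_SEQ_CST);  // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler barrier only;
						// orders against a signal
						// handler on the same thread
     __sync_synchronize ();			// legacy full barrier
*/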
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
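/* Illustration (not part of this file): on targets that expose a thread
   pointer register through the optab, the expander above turns

     void *tp (void) { return __builtin_thread_pointer (); }

   into a single register move rather than a call.  */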
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}
/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
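/* Illustration (not part of this file; use () is a placeholder): these two
   helpers implement the save/restore pair wrapped around variable-sized
   allocations, e.g.

     void f (int n)
     {
       for (int i = 0; i < n; i++)
	 {
	   char buf[n];   // stack pointer saved on entry to the block
	   use (buf);
	 }		  // and restored on exit, so the VLA cannot
     }			  // accumulate across iterations
*/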
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure, that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;
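      /* Illustration (not part of this file): a forwarding wrapper built
	 from the three builtins handled above.  The argument-size constant
	 64 is a programmer's guess, which is exactly the ??? noted in the
	 comment before BUILT_IN_APPLY; target_fn and wrapper are
	 hypothetical names.

	   void target_fn (int, double);

	   void wrapper (int i, double d)
	   {
	     void *args = __builtin_apply_args ();
	     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
	     __builtin_return (res);
	   }
      */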
    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
	 object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
	return target;
      break;
    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clrsb_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;
    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target);
      if (target)
	return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	 and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx_insn *label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx_insn *label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;
    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
		(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;
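      /* Illustration (not part of this file): the _1/_2/_4/_8/_16 suffix
	 encodes the access size, which is why each case above recovers the
	 machine mode with "fcode - BUILT_IN_SYNC_<OP>_1".  At the source
	 level the builtins are overloaded on the operand type:

	   static int counter;
	   int old = __sync_fetch_and_add (&counter, 1);
	   // maps to the ..._4 variant when int is 4 bytes
      */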
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
	unsigned int nargs, z;
	vec<tree, va_gc> *vec;

	mode =
	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
	if (target)
	  return target;

	/* If this is turned into an external library call, the weak parameter
	   must be dropped to match the expected parameter list.  */
	nargs = call_expr_nargs (exp);
	vec_alloc (vec, nargs - 1);
	for (z = 0; z < 3; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	/* Skip the boolean weak parameter.  */
	for (z = 4; z < 6; z++)
	  vec->quick_push (CALL_EXPR_ARG (exp, z));
	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
	break;
      }
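      /* Illustration (not part of this file): the weak-parameter dropping
	 above rewrites a failed inline expansion of

	   __atomic_compare_exchange (ptr, &expected, &desired,
				      0 (weak), __ATOMIC_SEQ_CST,
				      __ATOMIC_SEQ_CST);

	 into a call to the library entry point, whose signature
	 (__atomic_compare_exchange_N in libatomic) has no weak argument;
	 hence arguments 0-2 and 4-5 are kept and argument 3 is skipped.  */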
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
	return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
	enum built_in_function lib;
	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
						 ignore, lib);
	if (target)
	  return target;
	break;
      }
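      /* Illustration (not part of this file): each <OP>_FETCH case above
	 passes the matching FETCH_<OP> code as the library fallback, since
	 the runtime library provides fetch-then-op entry points; the
	 expander can redo the operation on the result, conceptually

	   __atomic_add_fetch (p, n, m) == __atomic_fetch_add (p, n, m) + n
      */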
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
					       ignore, BUILT_IN_NONE);
      if (target)
	return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHRU */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 folding.  */
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

static rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
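/* Illustration (not part of this file): a typical caller pattern, as used
   by the folders below, where t is some CALL_EXPR:

     switch (builtin_mathfn_code (t))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):
	 // t is sqrt/sqrtf/sqrtl with correctly typed arguments
	 break;
       default:
	 break;
       }
*/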
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
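/* Illustration (not part of this file): what the rules above mean at the
   source level:

     __builtin_constant_p (4 * 16)   // folds to 1
     __builtin_constant_p ("abc")    // folds to 1 (address of a literal)
     __builtin_constant_p (x)	     // 0 in an initializer; otherwise the
				     // fold is deferred (NULL_TREE) so RTL
				     // expansion can still decide later
*/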
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
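/* Illustration (not part of this file): the distribution step above turns

     if (__builtin_expect (p != 0 && q != 0, 1)) ...

   into the equivalent of

     if (__builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1)) ...

   so that each arm of the short-circuit gets its own prediction.  */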
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
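/* Illustration (not part of this file): with optimization enabled and
   float f, the narrowing above rewrites

     floor ((double) f)   ->   (double) floorf (f)

   which is safe because every float is exactly representable as a double,
   and the floor of such a value is still exactly representable.  */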
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build1_loc (loc, ABS_EXPR, type, real),
				build_real_truncate (type, dconst_sqrt2 ()));
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
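/* Illustration (not part of this file): the cabs folds above, stated at the
   source level:

     cabs (x + 0i)	->  fabs (x)
     cabs (x + x*1i)	->  fabs (x) * sqrt (2)	  (-funsafe-math-optimizations)
     cabs (-z)		->  cabs (z)
     cabs (conj (z))	->  cabs (z)
     cabs (z)		->  sqrt (r*r + i*i)	  (unsafe math, optimizing
						   for speed)
*/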
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
		  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
	return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
	  || fcode == BUILT_IN_ATANF
	  || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}
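
/* Editor's note: illustrative sketch (not part of GCC).  The tan(atan(x))
   fold above requires -funsafe-math-optimizations because atan rounds its
   result, so tan (atan (x)) need not be bit-identical to x.  */
#if 0
static double
tan_atan_example (double x)
{
  /* With unsafe math enabled, GCC folds this whole expression to `x'.  */
  return __builtin_tan (__builtin_atan (x));
}
#endif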
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
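
/* Editor's note: illustrative sketch (not part of GCC) of the rewrite the
   COMPOUND_EXPR above performs at the source level, assuming the libc
   provides C99 complex math so the cexpi builtin is usable.  */
#if 0
static void
sincos_canonicalized (double x, double *sinp, double *cosp)
{
  _Complex double t = __builtin_cexpi (x);	/* cos(x) + i*sin(x) */
  *sinp = __imag__ t;
  *cosp = __real__ t;
}
#endif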
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_ceil (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
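
/* Editor's note: illustrative sketch (not part of GCC).  The constant
   folding above uses real_round, which rounds halfway cases away from
   zero, matching C99 lround; a plain-C analogue for finite inputs:  */
#if 0
static long
lround_reference (double x)
{
  double r = (x < 0.0) ? -__builtin_floor (-x + 0.5)
		       : __builtin_floor (x + 0.5);
  return (long) r;
}
#endif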
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
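
/* Editor's note: illustrative sketch (not part of GCC).  wi::ffs above
   follows the usual ffs contract: ffs(0) == 0, otherwise one plus the
   index of the least significant set bit.  */
#if 0
static int
ffs_reference (unsigned long x)
{
  int n = 1;
  if (x == 0)
    return 0;
  while ((x & 1) == 0)
    {
      x >>= 1;
      n++;
    }
  return n;
}
#endif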
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
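
/* Editor's note: illustrative sketch (not part of GCC).  The wide_int
   bswap used above reverses bytes exactly like this 16-bit version:  */
#if 0
static unsigned short
bswap16_reference (unsigned short x)
{
  return (unsigned short) ((x << 8) | (x >> 8));
}
#endif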
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build1_loc (loc, ABS_EXPR, type, arg0),
			    build_real_truncate (type, dconst_sqrt2 ()));

  return NULL_TREE;
}
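
/* Editor's note: illustrative sketch (not part of GCC).  hypot(x,x)
   = sqrt(2*x*x) = fabs(x)*sqrt(2), which is the unsafe-math fold above;
   the constant below is sqrt(2) rounded to double.  */
#if 0
static double
hypot_same_arg (double x)
{
  return __builtin_fabs (x) * 1.4142135623730951;
}
#endif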
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (real_equal (&c, &dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (real_equal (&c, &dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (real_equal (&c, &dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && real_equal (&c, &dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (real_equal (&c, &dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      tree c = build_real_truncate (type, dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
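
/* Editor's note: illustrative sketch (not part of GCC).  The
   pow(pow(x,y),z) fold above depends on (x**y)**z == x**(y*z), which is
   only safe for nonnegative x: for x < 0 and fractional y the inner pow
   is NaN while the fused exponent may not be, hence the
   tree_expr_nonnegative_p guard.  */
#if 0
static double
pow_pow_rewrite (double x, double y, double z)
{
  /* Valid replacement for pow (pow (x, y), z) only when x >= 0.  */
  return __builtin_pow (x, y * z);
}
#endif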
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;

	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
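
/* Editor's note: illustrative sketch (not part of GCC).  real_powi above
   evaluates integer powers; binary (square-and-multiply) exponentiation
   is the standard way to do it, shown here for nonnegative exponents:  */
#if 0
static double
powi_reference (double x, unsigned int n)
{
  double r = 1.0;
  while (n)
    {
      if (n & 1)
	r *= x;
      x *= x;
      n >>= 1;
    }
  return r;
}
#endif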
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
	  || !tree_fits_uhwi_p (len))
	return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (arg2, &c))
	    return NULL_TREE;

	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (arg1), 0);

	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
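
/* Editor's note: illustrative sketch (not part of GCC).  The len == 1
   fold above reduces memcmp to a single unsigned byte subtraction:  */
#if 0
static int
memcmp_len1 (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif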
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
	      ? build_one_cst (type)
	      : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
			 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
					  build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
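
/* Editor's note: illustrative sketch (not part of GCC).  The last fold
   above rewrites signbit(x) as x < 0.0; that is only valid when the
   format has no signed zeros, since signbit(-0.0) is nonzero while
   -0.0 < 0.0 is false.  */
#if 0
static int
signbit_via_compare (double x)
{
  return x < 0.0;
}
#endif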
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
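
/* Editor's note: illustrative sketch (not part of GCC).  The isdigit
   fold relies on unsigned wrap-around: for c < '0' the subtraction
   wraps to a huge value, so a single comparison covers both bounds.  */
#if 0
static int
isdigit_folded (int c)
{
  return (unsigned int) c - '0' <= 9;
}
#endif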
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
	  tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
	return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
	return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}
/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (arg0)
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (arg1)
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    {
	      /* For logb(-Inf) we have to return +Inf.  */
	      if (real_isinf (value) && real_isneg (value))
		{
		  REAL_VALUE_TYPE tem;
		  real_inf (&tem);
		  return build_real (rettype, tem);
		}
	      return fold_convert_loc (loc, rettype, arg);
	    }
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  break;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
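
/* Editor's note: illustrative sketch (not part of GCC).  The folded
   decomposition satisfies x == f * 2**e with 0.5 <= |f| < 1:  */
#if 0
static void
frexp_example (void)
{
  int e;
  double f = __builtin_frexp (8.0, &e);		/* f == 0.5, e == 4 */
  (void) f;
}
#endif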
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && tree_fits_shwi_p (arg1))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (real_equal (&initial_result, &trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
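
/* Editor's note: illustrative sketch (not part of GCC).  The negative
   integral case above is why the fold forces the fraction's sign:  */
#if 0
static void
modf_example (void)
{
  double ip;
  double f = __builtin_modf (-2.0, &ip);	/* ip == -2.0, f == -0.0 */
  (void) f;
}
#endif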
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))".  */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
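
/* Editor's note: illustrative sketch (not part of GCC) of the decision
   chain the COND_EXPR nest above encodes; DBL_MIN is spelled out since
   float.h is not included here.  */
#if 0
static int
fpclassify_reference (double x, int fp_nan, int fp_infinite,
		      int fp_normal, int fp_subnormal, int fp_zero)
{
  double a = __builtin_fabs (x);
  if (x != x)					/* unordered, i.e. NaN */
    return fp_nan;
  if (a == __builtin_inf ())
    return fp_infinite;
  if (a >= 2.2250738585072014e-308)		/* DBL_MIN == 0x1p-1022 */
    return fp_normal;
  return a == 0.0 ? fp_zero : fp_subnormal;
}
#endif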
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
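
/* Editor's note: illustrative sketch (not part of GCC).  At the source
   level the overflow builtins folded above behave like:  */
#if 0
static _Bool
checked_add (int a, int b, int *sum)
{
  return __builtin_add_overflow (a, b, sum);	/* true on overflow */
}
#endif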
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_CBRT):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_LOG):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG2):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG10):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
      break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT
):
9981 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
9983 case BUILT_IN_BSWAP16
:
9984 case BUILT_IN_BSWAP32
:
9985 case BUILT_IN_BSWAP64
:
9986 return fold_builtin_bswap (fndecl
, arg0
);
9988 CASE_INT_FN (BUILT_IN_FFS
):
9989 CASE_INT_FN (BUILT_IN_CLZ
):
9990 CASE_INT_FN (BUILT_IN_CTZ
):
9991 CASE_INT_FN (BUILT_IN_CLRSB
):
9992 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9993 CASE_INT_FN (BUILT_IN_PARITY
):
9994 return fold_builtin_bitop (fndecl
, arg0
);
9996 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9997 return fold_builtin_signbit (loc
, arg0
, type
);
9999 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10000 return fold_builtin_significand (loc
, arg0
, type
);
10002 CASE_FLT_FN (BUILT_IN_ILOGB
):
10003 CASE_FLT_FN (BUILT_IN_LOGB
):
10004 return fold_builtin_logb (loc
, arg0
, type
);
10006 case BUILT_IN_ISASCII
:
10007 return fold_builtin_isascii (loc
, arg0
);
10009 case BUILT_IN_TOASCII
:
10010 return fold_builtin_toascii (loc
, arg0
);
10012 case BUILT_IN_ISDIGIT
:
10013 return fold_builtin_isdigit (loc
, arg0
);
10015 CASE_FLT_FN (BUILT_IN_FINITE
):
10016 case BUILT_IN_FINITED32
:
10017 case BUILT_IN_FINITED64
:
10018 case BUILT_IN_FINITED128
:
10019 case BUILT_IN_ISFINITE
:
10021 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
10024 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10027 CASE_FLT_FN (BUILT_IN_ISINF
):
10028 case BUILT_IN_ISINFD32
:
10029 case BUILT_IN_ISINFD64
:
10030 case BUILT_IN_ISINFD128
:
10032 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
10035 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10038 case BUILT_IN_ISNORMAL
:
10039 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
10041 case BUILT_IN_ISINF_SIGN
:
10042 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
10044 CASE_FLT_FN (BUILT_IN_ISNAN
):
10045 case BUILT_IN_ISNAND32
:
10046 case BUILT_IN_ISNAND64
:
10047 case BUILT_IN_ISNAND128
:
10048 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
10050 case BUILT_IN_FREE
:
10051 if (integer_zerop (arg0
))
10052 return build_empty_stmt (loc
);
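
/* Illustrative example (not part of the original source): with a constant
   argument in range, the one-argument math cases above fold the call into
   a constant computed via MPFR/MPC, e.g. "double d = __builtin_sin (1.0);"
   becomes a REAL_CST holding sin(1), while an out-of-range constant such
   as __builtin_acos (2.0) is left as a runtime call.  */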

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
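
/* Illustrative note (not part of the original source): the unordered
   comparison cases above implement the C99 macros by negating the inverse
   unordered compare, e.g. isgreater (x, y) folds to !(x UNLE y), so a NaN
   operand yields 0 without raising an exception on quiet NaNs; when the
   mode cannot hold NaNs, the plain ordered code (LE_EXPR etc.) is used
   instead.  */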

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
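
/* Illustrative usage (not part of the original source) of the overflow
   builtins dispatched above:

     int r;
     if (__builtin_add_overflow (a, b, &r))
       handle_overflow ();

   where handle_overflow is a hypothetical callback.  The generic
   three-argument forms deduce the arithmetic type from the result
   pointer; the typed variants (sadd, uaddl, ...) are fixed.  */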

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
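
/* Illustrative example (not part of the original source): a glibc-style
   -D_FORTIFY_SOURCE wrapper that must not be folded before it is inlined:

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding the builtin away before the wrapper is inlined would skip the
   object-size check.  */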

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
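
/* Illustrative sketch (not part of the original source): a chk-to-plain
   rewrite such as turning __sprintf_chk (dest, flag, size, fmt, ...) into
   sprintf (dest, fmt, ...) could be expressed as

     rewrite_call_expr (loc, exp, 4, sprintf_decl, 2, dest, fmt);

   skipping the four fixed arguments of EXP and re-appending its trailing
   "..." arguments; sprintf_decl names a hypothetical replacement decl.  */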

/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
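
/* Illustrative usage (not part of the original source): checking that a
   call looks like (pointer, integer):

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   A trailing 0 instead of VOID_TYPE would accept further arguments, as
   with a "..." ellipsis.  */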

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
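
/* Illustrative foldings performed above (not part of the original source):

     strstr (s, "")        constant-folds to (char *) s
     strstr (s, "c")       becomes strchr (s, 'c')
     strstr ("abcd", "c")  becomes "abcd" + 2, an offset into the literal

   assuming the noted arguments are string literals.  */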

/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}
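
/* Illustrative foldings performed above (not part of the original source):

     strpbrk (s, "")   yields a null pointer, still evaluating s for
                       side-effects
     strpbrk (s, "c")  becomes strchr (s, 'c')

   mirroring the strstr transformation earlier in this file.  */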

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return build_int_cst (size_type_node, r);
        }

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}
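
/* Illustrative constant foldings (not part of the original source):

     strspn ("hello", "hel")   folds to 4
     strcspn ("hello", "lo")   folds to 2
     strcspn (s, "")           becomes __builtin_strlen (s)

   the first two computed at compile time with the host strspn/strcspn.  */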

/* Fold the next_arg or va_start call EXP.  Returns true if an error was
   produced, false otherwise.  This is done so that we don't output the
   error or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can get the
             not the last argument even though the user used the last
             argument.  We just warn and set the arg to be the last
             argument so that we will get wrong-code because of
             it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behaviour when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}

/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
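
/* Illustrative semantics (not part of the original source): when the
   object size is unknown at expansion time,

     size_t max = __builtin_object_size (p, 0);   yields (size_t) -1
     size_t min = __builtin_object_size (p, 2);   yields (size_t) 0

   matching the constm1_rtx/const0_rtx fallback above.  */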

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
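
/* Illustrative diagnostic (not part of the original source): with a known
   destination size, a call such as

     char buf[4];
     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

   has size = 4 and len = 8 here, so it is diagnosed above as a call that
   "will always overflow destination buffer".  */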

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
        {
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
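
/* Illustrative diagnostic (not part of the original source):

     int x;
     free (&x);

   is warned about under -Wfree-nonheap-object as an attempt to free the
   non-heap object 'x'.  */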

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS the arguments and NARGS the number of arguments.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
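
/* Illustrative example (not part of the original source): the ASIN case in
   fold_builtin_1 reaches here with MIN/MAX = -1/+1 and INCLUSIVE = true,
   so "double d = __builtin_asin (0.5);" folds to a constant of roughly
   0.523598776, while __builtin_asin (2.0) is left alone as out of
   range.  */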

/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}

/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}

/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer type were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}

/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
11808 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11809 the pointer *(ARG_QUO) and return the result. The type is taken
11810 from the type of ARG0 and is used for setting the precision of the
11811 calculation and results. */
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
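
/* Worked example of the narrowing above (editor's sketch, not compiled):
   with a 64-bit host long and a 32-bit target int, INT_TYPE_SIZE is 32,
   so quo is reduced modulo 2^31; C's '%' truncates toward zero, so the
   sign of quo is preserved.  INT_TYPE_SIZE is hard-coded as 32 here.  */
#if 0
#include <limits.h>

static int
example_narrow_quo (long quo)
{
  if (sizeof (quo) * CHAR_BIT > 32)
    quo %= (long) (1UL << (32 - 1));
  return (int) quo;
}
#endif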

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
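
/* Illustrative sketch (editor's example, not compiled): mpfr_lgamma
   computes both the value and the signgam flag in one call, which is
   what lets the folder above build the (*arg_sg = sg, lgamma-result)
   compound.  Assumes a binary64 target (precision 53).  */
#if 0
#include <mpfr.h>

static double
example_lgamma_r (double x, int *signgamp)
{
  mpfr_t m;
  double r;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  /* Sets *signgamp to -1 or 1, like the &sg argument above.  */
  mpfr_lgamma (m, signgamp, m, GMP_RNDN);
  r = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return r;
}
#endif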

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */
static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
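
/* Typical call shape (editor's sketch, not compiled): MPC entry points
   such as mpc_cos have exactly the callback type, so a caller folding a
   constant ccos can pass the library function directly.  */
#if 0
static tree
example_fold_ccos (tree arg, tree type)
{
  return do_mpc_arg1 (arg, type, mpc_cos);
}
#endif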

/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
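
/* Typical call shape (editor's sketch, not compiled): a two-operand
   complex fold such as cpow passes mpc_pow here; whether DO_NONFINITE
   is set depends on the caller's tolerance for folding NaN/Inf
   operands.  */
#if 0
static tree
example_fold_cpow (tree arg0, tree arg1, tree type)
{
  return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
}
#endif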

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
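
/* Usage sketch (editor's example, not compiled): a GIMPLE pass holding
   a builtin call can try this wrapper and, if it yields a tree,
   substitute it for the call; the replacement step itself is elided.  */
#if 0
static void
example_try_fold (gcall *stmt)
{
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree folded = fold_call_stmt (stmt, ignore);

  if (folded)
    {
      /* ... replace the call statement with FOLDED ...  */
    }
}
#endif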

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
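
/* Example input that reaches this function (editor's sketch, not
   compiled): a translation unit may rename the assembler symbol of a
   builtin with an asm label; block moves emitted by GCC then call the
   renamed symbol (__my_memcpy is a made-up name).  */
#if 0
void *memcpy (void *, const void *, __SIZE_TYPE__) __asm__ ("__my_memcpy");
#endif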

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
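
/* Usage sketch (editor's example, not compiled): optimization
   heuristics, e.g. deciding whether a call should block inlining or
   loop transformations, gate on this predicate.  */
#if 0
static bool
example_call_is_cheap (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  return fndecl != NULL_TREE && is_inexpensive_builtin (fndecl);
}
#endif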